From ff620850cd236d4f35235593311a8aefc927a744 Mon Sep 17 00:00:00 2001
From: Nick Cabatoff
Date: Tue, 13 Jun 2023 17:24:50 +0000
Subject: [PATCH] backport of commit 4654c15248013edeb36fdeae28ab3631bebe0a8e

---
 .copywrite.hcl | 3 +-
 .github/ISSUE_TEMPLATE/config.yml | 3 -
 .../scripts/generate-test-package-lists.sh | 35 +-
 .github/scripts/verify_changes.sh | 6 +-
 .github/workflows/actionlint.yml | 2 +-
 .github/workflows/backport.yml | 2 +-
 .github/workflows/build-vault-oss.yml | 14 +-
 .github/workflows/build.yml | 41 +-
 .github/workflows/changelog-checker.yml | 2 +-
 .github/workflows/ci.yml | 46 +-
 .../drepecated-functions-checker.yml | 31 -
 .github/workflows/enos-fmt.yml | 2 +-
 .../workflows/enos-release-testing-oss.yml | 3 +-
 .github/workflows/enos-run-k8s.yml | 4 +-
 .github/workflows/godoc-test-checker.yml | 4 +-
 .github/workflows/milestone-checker.yml | 10 +-
 .../workflows/nil-nil-function-checker.yml | 26 -
 .github/workflows/oss.yml | 6 +-
 .github/workflows/remove-labels.yml | 2 +-
 .github/workflows/security-scan.yml | 12 +-
 .github/workflows/setup-go-cache.yml | 4 +-
 .github/workflows/stable-website.yaml | 2 +-
 .github/workflows/test-ci-bootstrap.yml | 4 +-
 .github/workflows/test-ci-cleanup.yml | 8 +-
 .github/workflows/test-enos-scenario-ui.yml | 20 +-
 .github/workflows/test-go.yml | 35 +-
 .github/workflows/test-link-rewrites.yml | 16 +
 .../workflows/test-run-acc-tests-for-path.yml | 6 +-
 .../test-run-enos-scenario-matrix.yml | 10 +-
 .gitignore | 2 -
 .release/ci.hcl | 3 -
 .release/docker/docker-entrypoint.sh | 3 -
 .release/docker/ubi-docker-entrypoint.sh | 3 -
 .release/linux/package/etc/vault.d/vault.hcl | 3 -
 .release/release-metadata.hcl | 3 -
 .release/security-scan.hcl | 3 -
 CHANGELOG.md | 655 +-
 CODEOWNERS | 60 +-
 Dockerfile | 3 -
 Makefile | 37 +-
 api/api_test.go | 3 -
 api/auth.go | 3 -
 api/auth/approle/approle.go | 3 -
 api/auth/approle/approle_test.go | 3 -
 api/auth/approle/go.mod | 2 +-
 api/auth/approle/go.sum | 32 +-
 api/auth/aws/aws.go | 3 -
 api/auth/aws/go.mod | 2 +-
 api/auth/aws/go.sum | 32 +-
 api/auth/azure/azure.go | 3 -
 api/auth/azure/go.mod | 2 +-
 api/auth/azure/go.sum | 32 +-
 api/auth/gcp/gcp.go | 3 -
 api/auth/gcp/go.mod | 2 +-
 api/auth/gcp/go.sum | 28 +-
 api/auth/kubernetes/go.mod | 2 +-
 api/auth/kubernetes/go.sum | 32 +-
 api/auth/kubernetes/kubernetes.go | 3 -
 api/auth/ldap/go.mod | 2 +-
 api/auth/ldap/go.sum | 32 +-
 api/auth/ldap/ldap.go | 3 -
 api/auth/ldap/ldap_test.go | 3 -
 api/auth/userpass/go.mod | 2 +-
 api/auth/userpass/go.sum | 32 +-
 api/auth/userpass/userpass.go | 3 -
 api/auth/userpass/userpass_test.go | 3 -
 api/auth_test.go | 3 -
 api/auth_token.go | 3 -
 api/client.go | 13 -
 api/client_test.go | 3 -
 api/go.mod | 9 +-
 api/go.sum | 12 +-
 api/help.go | 3 -
 api/kv.go | 3 -
 api/kv_test.go | 3 -
 api/kv_v1.go | 3 -
 api/kv_v2.go | 3 -
 api/lifetime_watcher.go | 49 +-
 api/logical.go | 3 -
 api/output_policy.go | 35 +-
 api/output_policy_test.go | 83 -
 api/output_string.go | 3 -
 api/plugin_helpers.go | 15 +-
 api/plugin_helpers_test.go | 3 -
 api/plugin_types.go | 3 -
 api/renewer_test.go | 50 -
 api/replication_status.go | 130 -
 api/request.go | 3 -
 api/request_test.go | 3 -
 api/response.go | 3 -
 api/secret.go | 3 -
 api/ssh.go | 3 -
 api/ssh_agent.go | 3 -
 api/ssh_agent_test.go | 3 -
 api/sys.go | 3 -
 api/sys_audit.go | 3 -
 api/sys_auth.go | 3 -
 api/sys_capabilities.go | 3 -
 api/sys_config_cors.go | 3 -
 api/sys_generate_root.go | 3 -
 api/sys_hastatus.go | 3 -
 api/sys_health.go | 3 -
 api/sys_init.go | 3 -
 api/sys_leader.go | 3 -
 api/sys_leases.go | 3 -
 api/sys_mfa.go | 3 -
api/sys_monitor.go | 3 - api/sys_mounts.go | 3 - api/sys_mounts_test.go | 3 - api/sys_plugins.go | 3 - api/sys_plugins_test.go | 3 - api/sys_policy.go | 3 - api/sys_raft.go | 3 - api/sys_rekey.go | 3 - api/sys_rotate.go | 3 - api/sys_seal.go | 3 - api/sys_stepdown.go | 3 - api/test-fixtures/agent_config.hcl | 3 - audit/audit.go | 3 - audit/format.go | 136 +- audit/format_json.go | 3 - audit/format_json_test.go | 3 - audit/format_jsonx.go | 3 - audit/format_jsonx_test.go | 3 - audit/format_test.go | 3 - audit/formatter.go | 3 - audit/hashstructure.go | 3 - audit/hashstructure_test.go | 3 - builtin/audit/file/backend.go | 3 - builtin/audit/file/backend_test.go | 3 - builtin/audit/socket/backend.go | 3 - builtin/audit/syslog/backend.go | 3 - builtin/credential/approle/backend.go | 4 - builtin/credential/approle/backend_test.go | 3 - .../credential/approle/cmd/approle/main.go | 9 +- builtin/credential/approle/path_login.go | 7 - builtin/credential/approle/path_login_test.go | 118 +- builtin/credential/approle/path_role.go | 104 - builtin/credential/approle/path_role_test.go | 920 +- .../credential/approle/path_tidy_user_id.go | 9 - .../approle/path_tidy_user_id_test.go | 34 +- builtin/credential/approle/validation.go | 3 - builtin/credential/approle/validation_test.go | 3 - builtin/credential/aws/backend.go | 40 +- builtin/credential/aws/backend_e2e_test.go | 3 - builtin/credential/aws/backend_test.go | 52 +- builtin/credential/aws/certificates.go | 3 - builtin/credential/aws/cli.go | 3 - builtin/credential/aws/client.go | 5 +- builtin/credential/aws/cmd/aws/main.go | 9 +- .../credential/aws/path_config_certificate.go | 27 - builtin/credential/aws/path_config_client.go | 22 - .../credential/aws/path_config_client_test.go | 3 - .../credential/aws/path_config_identity.go | 15 - .../aws/path_config_identity_test.go | 3 - .../credential/aws/path_config_rotate_root.go | 17 +- .../aws/path_config_rotate_root_test.go | 20 +- builtin/credential/aws/path_config_sts.go | 14 - .../path_config_tidy_identity_accesslist.go | 22 - .../aws/path_config_tidy_roletag_denylist.go | 22 - .../aws/path_identity_accesslist.go | 14 - builtin/credential/aws/path_login.go | 23 +- builtin/credential/aws/path_login_test.go | 3 - builtin/credential/aws/path_role.go | 22 - builtin/credential/aws/path_role_tag.go | 9 - builtin/credential/aws/path_role_test.go | 3 - .../credential/aws/path_roletag_denylist.go | 14 - .../aws/path_tidy_identity_accesslist.go | 10 - .../aws/path_tidy_roletag_denylist.go | 10 - builtin/credential/cert/backend.go | 5 - builtin/credential/cert/backend_test.go | 3 - builtin/credential/cert/cli.go | 3 - builtin/credential/cert/cmd/cert/main.go | 9 +- builtin/credential/cert/path_certs.go | 59 +- builtin/credential/cert/path_config.go | 24 +- builtin/credential/cert/path_crls.go | 13 - builtin/credential/cert/path_crls_test.go | 3 - builtin/credential/cert/path_login.go | 68 +- builtin/credential/cert/path_login_test.go | 3 - builtin/credential/cert/test_responder.go | 3 - builtin/credential/github/backend.go | 29 +- builtin/credential/github/backend_test.go | 3 - builtin/credential/github/cli.go | 3 - builtin/credential/github/cmd/github/main.go | 9 +- builtin/credential/github/path_config.go | 25 +- builtin/credential/github/path_config_test.go | 3 - builtin/credential/github/path_login.go | 9 - builtin/credential/github/path_login_test.go | 3 - builtin/credential/ldap/backend.go | 8 +- builtin/credential/ldap/backend_test.go | 4 - builtin/credential/ldap/cli.go | 27 +- 
builtin/credential/ldap/cmd/ldap/main.go | 9 +- builtin/credential/ldap/path_config.go | 30 +- builtin/credential/ldap/path_groups.go | 29 +- builtin/credential/ldap/path_login.go | 9 - builtin/credential/ldap/path_users.go | 32 +- builtin/credential/okta/backend.go | 8 +- builtin/credential/okta/backend_test.go | 3 - builtin/credential/okta/cli.go | 3 - builtin/credential/okta/cmd/okta/main.go | 9 +- builtin/credential/okta/path_config.go | 35 +- builtin/credential/okta/path_groups.go | 29 +- builtin/credential/okta/path_groups_test.go | 3 - builtin/credential/okta/path_login.go | 13 - builtin/credential/okta/path_users.go | 26 +- builtin/credential/radius/backend.go | 5 - builtin/credential/radius/backend_test.go | 10 +- builtin/credential/radius/cmd/radius/main.go | 9 +- builtin/credential/radius/path_config.go | 40 +- builtin/credential/radius/path_login.go | 10 - builtin/credential/radius/path_users.go | 29 +- builtin/credential/token/cli.go | 3 - builtin/credential/userpass/backend.go | 5 - builtin/credential/userpass/backend_test.go | 3 - builtin/credential/userpass/cli.go | 3 - .../credential/userpass/cmd/userpass/main.go | 9 +- builtin/credential/userpass/path_login.go | 9 - .../credential/userpass/path_user_password.go | 10 - .../credential/userpass/path_user_policies.go | 13 - builtin/credential/userpass/path_users.go | 26 +- builtin/credential/userpass/stepwise_test.go | 3 - builtin/logical/aws/backend.go | 33 +- builtin/logical/aws/backend_test.go | 31 +- builtin/logical/aws/client.go | 3 - builtin/logical/aws/cmd/aws/main.go | 9 +- builtin/logical/aws/iam_policies.go | 9 +- builtin/logical/aws/iam_policies_test.go | 12 +- builtin/logical/aws/path_config_lease.go | 25 +- builtin/logical/aws/path_config_root.go | 25 +- builtin/logical/aws/path_config_root_test.go | 5 +- .../logical/aws/path_config_rotate_root.go | 16 +- builtin/logical/aws/path_roles.go | 18 +- builtin/logical/aws/path_roles_test.go | 9 +- builtin/logical/aws/path_static_creds.go | 99 - builtin/logical/aws/path_static_creds_test.go | 92 - builtin/logical/aws/path_static_roles.go | 331 - builtin/logical/aws/path_static_roles_test.go | 490 - builtin/logical/aws/path_user.go | 43 +- builtin/logical/aws/rollback.go | 3 - builtin/logical/aws/rotation.go | 188 - builtin/logical/aws/rotation_test.go | 348 - builtin/logical/aws/secret_access_keys.go | 19 +- .../logical/aws/secret_access_keys_test.go | 7 +- builtin/logical/aws/stepwise_test.go | 3 - builtin/logical/consul/backend.go | 5 - builtin/logical/consul/backend_test.go | 3 - builtin/logical/consul/client.go | 3 - builtin/logical/consul/cmd/consul/main.go | 9 +- builtin/logical/consul/path_config.go | 27 +- builtin/logical/consul/path_roles.go | 14 - builtin/logical/consul/path_token.go | 10 - builtin/logical/consul/path_token_test.go | 3 - builtin/logical/consul/secret_token.go | 3 - builtin/logical/database/backend.go | 12 +- builtin/logical/database/backend_test.go | 3 - builtin/logical/database/credentials.go | 222 - builtin/logical/database/credentials_test.go | 249 - .../logical/database/dbplugin/plugin_test.go | 3 - builtin/logical/database/mocks_test.go | 3 - builtin/logical/database/mockv4.go | 3 - builtin/logical/database/mockv5.go | 3 - .../database/path_config_connection.go | 55 +- .../database/path_config_connection_test.go | 3 - builtin/logical/database/path_creds_create.go | 39 - builtin/logical/database/path_roles.go | 41 +- builtin/logical/database/path_roles_test.go | 3 - .../database/path_rotate_credentials.go | 22 - 
builtin/logical/database/rollback.go | 3 - builtin/logical/database/rollback_test.go | 3 - builtin/logical/database/rotation.go | 3 - builtin/logical/database/rotation_test.go | 3 - builtin/logical/database/secret_creds.go | 3 - builtin/logical/database/version_wrapper.go | 3 - .../logical/database/version_wrapper_test.go | 3 - .../logical/database/versioning_large_test.go | 3 - builtin/logical/nomad/backend.go | 5 - builtin/logical/nomad/backend_test.go | 13 +- builtin/logical/nomad/cmd/nomad/main.go | 9 +- builtin/logical/nomad/path_config_access.go | 42 +- builtin/logical/nomad/path_config_lease.go | 34 +- builtin/logical/nomad/path_creds_create.go | 10 - builtin/logical/nomad/path_roles.go | 14 - builtin/logical/nomad/secret_token.go | 3 - builtin/logical/pki/acme_authorizations.go | 187 - builtin/logical/pki/acme_billing.go | 25 - builtin/logical/pki/acme_billing_test.go | 318 - builtin/logical/pki/acme_challenge_engine.go | 535 - builtin/logical/pki/acme_challenges.go | 469 - builtin/logical/pki/acme_challenges_test.go | 703 - builtin/logical/pki/acme_eab_policy.go | 69 - builtin/logical/pki/acme_errors.go | 210 - builtin/logical/pki/acme_jws.go | 278 - builtin/logical/pki/acme_state.go | 659 - builtin/logical/pki/acme_state_test.go | 43 - builtin/logical/pki/acme_wrappers.go | 467 - builtin/logical/pki/acme_wrappers_test.go | 125 - builtin/logical/pki/backend.go | 260 +- builtin/logical/pki/backend_test.go | 183 +- builtin/logical/pki/ca_test.go | 3 - builtin/logical/pki/ca_util.go | 61 - builtin/logical/pki/cert_util.go | 291 +- builtin/logical/pki/cert_util_test.go | 3 - builtin/logical/pki/chain_test.go | 3 - builtin/logical/pki/chain_util.go | 3 - builtin/logical/pki/cmd/pki/main.go | 9 +- builtin/logical/pki/config_util.go | 3 - builtin/logical/pki/crl_test.go | 157 +- builtin/logical/pki/crl_util.go | 189 +- builtin/logical/pki/dnstest/server.go | 415 - builtin/logical/pki/fields.go | 97 - builtin/logical/pki/integration_test.go | 179 +- builtin/logical/pki/key_util.go | 3 - builtin/logical/pki/managed_key_util.go | 3 - builtin/logical/pki/path_acme_account.go | 474 - .../logical/pki/path_acme_authorizations.go | 100 - builtin/logical/pki/path_acme_challenges.go | 114 - builtin/logical/pki/path_acme_directory.go | 70 - builtin/logical/pki/path_acme_eab.go | 232 - builtin/logical/pki/path_acme_nonce.go | 73 - builtin/logical/pki/path_acme_order.go | 1084 - builtin/logical/pki/path_acme_order_test.go | 142 - builtin/logical/pki/path_acme_revoke.go | 176 - builtin/logical/pki/path_acme_test.go | 1578 - builtin/logical/pki/path_config_acme.go | 372 - builtin/logical/pki/path_config_acme_test.go | 132 - builtin/logical/pki/path_config_ca.go | 151 +- builtin/logical/pki/path_config_cluster.go | 80 - builtin/logical/pki/path_config_crl.go | 157 +- builtin/logical/pki/path_config_urls.go | 81 - builtin/logical/pki/path_fetch.go | 109 +- builtin/logical/pki/path_fetch_issuers.go | 262 +- builtin/logical/pki/path_fetch_keys.go | 111 +- builtin/logical/pki/path_intermediate.go | 52 +- builtin/logical/pki/path_issue_sign.go | 304 +- builtin/logical/pki/path_manage_issuers.go | 282 +- builtin/logical/pki/path_manage_keys.go | 72 +- builtin/logical/pki/path_manage_keys_test.go | 14 - builtin/logical/pki/path_ocsp.go | 53 +- builtin/logical/pki/path_ocsp_test.go | 6 - builtin/logical/pki/path_resign_crls.go | 42 - builtin/logical/pki/path_resign_crls_test.go | 7 - builtin/logical/pki/path_revoke.go | 164 +- builtin/logical/pki/path_roles.go | 401 - builtin/logical/pki/path_roles_test.go | 6 
- builtin/logical/pki/path_root.go | 34 +- builtin/logical/pki/path_sign_issuers.go | 104 +- builtin/logical/pki/path_tidy.go | 955 +- builtin/logical/pki/path_tidy_test.go | 393 +- builtin/logical/pki/periodic.go | 3 - builtin/logical/pki/secret_certs.go | 3 - builtin/logical/pki/storage.go | 50 +- builtin/logical/pki/storage_migrations.go | 3 - .../logical/pki/storage_migrations_test.go | 3 - builtin/logical/pki/storage_test.go | 3 - builtin/logical/pki/storage_unified.go | 3 - builtin/logical/pki/test_helpers.go | 22 - builtin/logical/pki/util.go | 3 - builtin/logical/pkiext/nginx_test.go | 21 +- .../logical/pkiext/pkiext_binary/acme_test.go | 937 - .../pkiext/pkiext_binary/pki_cluster.go | 311 - .../logical/pkiext/pkiext_binary/pki_mount.go | 150 - builtin/logical/pkiext/test_helpers.go | 21 - builtin/logical/pkiext/zlint_test.go | 15 +- builtin/logical/rabbitmq/backend.go | 5 - builtin/logical/rabbitmq/backend_test.go | 5 +- builtin/logical/rabbitmq/cmd/rabbitmq/main.go | 9 +- builtin/logical/rabbitmq/passwords.go | 3 - .../rabbitmq/path_config_connection.go | 10 - .../rabbitmq/path_config_connection_test.go | 3 - builtin/logical/rabbitmq/path_config_lease.go | 26 +- .../rabbitmq/path_config_lease_test.go | 3 - builtin/logical/rabbitmq/path_role_create.go | 10 - .../logical/rabbitmq/path_role_create_test.go | 3 - builtin/logical/rabbitmq/path_roles.go | 11 - builtin/logical/rabbitmq/secret_creds.go | 3 - builtin/logical/ssh/backend.go | 5 - builtin/logical/ssh/backend_test.go | 389 +- builtin/logical/ssh/cmd/ssh/main.go | 9 +- .../ssh/path_cleanup_dynamic_host_keys.go | 8 - builtin/logical/ssh/path_config_ca.go | 32 +- builtin/logical/ssh/path_config_ca_test.go | 3 - .../logical/ssh/path_config_zeroaddress.go | 33 +- builtin/logical/ssh/path_creds_create.go | 12 - builtin/logical/ssh/path_fetch.go | 8 - builtin/logical/ssh/path_issue.go | 9 - builtin/logical/ssh/path_issue_sign.go | 5 +- builtin/logical/ssh/path_lookup.go | 12 - builtin/logical/ssh/path_roles.go | 14 - builtin/logical/ssh/path_sign.go | 9 - builtin/logical/ssh/path_verify.go | 8 - builtin/logical/ssh/secret_otp.go | 3 - builtin/logical/ssh/util.go | 3 - builtin/logical/totp/backend.go | 5 - builtin/logical/totp/backend_test.go | 3 - builtin/logical/totp/cmd/totp/main.go | 9 +- builtin/logical/totp/path_code.go | 25 +- builtin/logical/totp/path_keys.go | 37 +- builtin/logical/transit/backend.go | 12 +- builtin/logical/transit/backend_test.go | 301 +- builtin/logical/transit/cmd/transit/main.go | 9 +- builtin/logical/transit/managed_key_util.go | 3 - builtin/logical/transit/path_backup.go | 10 - builtin/logical/transit/path_backup_test.go | 3 - builtin/logical/transit/path_byok.go | 206 - builtin/logical/transit/path_byok_test.go | 229 - builtin/logical/transit/path_cache_config.go | 25 +- .../logical/transit/path_cache_config_test.go | 3 - builtin/logical/transit/path_config_keys.go | 25 +- .../logical/transit/path_config_keys_test.go | 3 - builtin/logical/transit/path_datakey.go | 10 - builtin/logical/transit/path_decrypt.go | 9 - .../transit/path_decrypt_bench_test.go | 3 - builtin/logical/transit/path_decrypt_test.go | 3 - builtin/logical/transit/path_encrypt.go | 9 - .../transit/path_encrypt_bench_test.go | 3 - builtin/logical/transit/path_encrypt_test.go | 50 - builtin/logical/transit/path_export.go | 163 +- builtin/logical/transit/path_export_test.go | 3 - builtin/logical/transit/path_hash.go | 10 - builtin/logical/transit/path_hash_test.go | 3 - builtin/logical/transit/path_hmac.go | 10 - 
builtin/logical/transit/path_hmac_test.go | 3 - builtin/logical/transit/path_import.go | 145 +- builtin/logical/transit/path_import_test.go | 451 +- builtin/logical/transit/path_keys.go | 78 +- builtin/logical/transit/path_keys_config.go | 38 +- .../logical/transit/path_keys_config_test.go | 3 - builtin/logical/transit/path_keys_test.go | 3 - builtin/logical/transit/path_random.go | 10 - builtin/logical/transit/path_random_test.go | 3 - builtin/logical/transit/path_restore.go | 10 - builtin/logical/transit/path_restore_test.go | 3 - builtin/logical/transit/path_rewrap.go | 9 - builtin/logical/transit/path_rewrap_test.go | 3 - builtin/logical/transit/path_rotate.go | 18 +- builtin/logical/transit/path_sign_verify.go | 29 +- .../logical/transit/path_sign_verify_test.go | 3 - builtin/logical/transit/path_trim.go | 12 +- builtin/logical/transit/path_trim_test.go | 3 - builtin/logical/transit/path_wrapping_key.go | 7 - .../logical/transit/path_wrapping_key_test.go | 3 - builtin/logical/transit/stepwise_test.go | 3 - builtin/plugin/backend.go | 48 +- builtin/plugin/backend_lazyLoad_test.go | 3 - builtin/plugin/backend_test.go | 3 - builtin/plugin/mock_plugin_test.go | 3 - builtin/plugin/v5/backend.go | 3 - changelog/17919.txt | 3 - changelog/17934.txt | 3 - changelog/18186.txt | 6 - changelog/18225.txt | 3 - changelog/18230.txt | 3 - changelog/18376.txt | 3 - changelog/18465.txt | 3 - changelog/18468.txt | 3 - changelog/18472.txt | 3 - changelog/18515.txt | 3 - changelog/18521.txt | 3 - changelog/18542.txt | 3 - changelog/18624.txt | 3 - changelog/18625.txt | 3 - changelog/18626.txt | 3 - changelog/18627.txt | 3 - changelog/18628.txt | 3 - changelog/18633.txt | 3 - changelog/18729.txt | 3 - changelog/18863.txt | 3 - changelog/18870.txt | 3 - changelog/19002.txt | 3 - changelog/19043.txt | 3 - changelog/19103.txt | 3 - changelog/19135.txt | 3 - changelog/19139.txt | 3 - changelog/19160.txt | 3 - changelog/19170.txt | 3 - changelog/19187.txt | 3 - changelog/19215.txt | 5 - changelog/19230.txt | 4 + changelog/19247.txt | 3 - changelog/19252.txt | 3 - changelog/19260.txt | 3 - changelog/19296.txt | 3 - changelog/19319.txt | 3 - changelog/19365.txt | 7 - changelog/19378.txt | 3 - changelog/19416.txt | 3 - changelog/19468.txt | 3 - changelog/19495.txt | 3 - changelog/19519.txt | 3 - changelog/19520.txt | 3 - changelog/19616.txt | 3 - changelog/19776.txt | 3 - changelog/19791.txt | 3 - changelog/19798.txt | 3 - changelog/19814.txt | 3 - changelog/19829.txt | 3 - changelog/19846.txt | 3 - changelog/19861.txt | 3 - changelog/19862.txt | 3 - changelog/19878.txt | 3 - changelog/19891.txt | 3 - changelog/19901.txt | 3 - changelog/19913.txt | 3 - changelog/19954.txt | 3 - changelog/19993.txt | 3 - changelog/20073.txt | 3 - changelog/20078.txt | 3 - changelog/20086.txt | 4 - changelog/20125.txt | 3 - changelog/20150.txt | 4 - changelog/20163.txt | 3 - changelog/20224.txt | 3 - changelog/20234.txt | 3 - changelog/20247.txt | 3 - changelog/20253.txt | 3 - changelog/20261.txt | 3 - changelog/20265.txt | 3 - changelog/20276.txt | 3 - changelog/20285.txt | 3 - changelog/20425.txt | 3 - changelog/20430.txt | 3 - changelog/20431.txt | 3 - changelog/20441.txt | 3 - changelog/20442.txt | 3 - changelog/20464.txt | 3 - changelog/20481.txt | 3 - changelog/20488.txt | 3 - changelog/20530.txt | 3 - changelog/20536.txt | 3 - changelog/20548.txt | 3 - changelog/20559.txt | 3 - changelog/20569.txt | 3 - changelog/20590.txt | 3 - changelog/20626.txt | 4 - changelog/20628.txt | 3 - changelog/20629.txt | 3 - changelog/20652.txt 
| 3 - changelog/20654.txt | 3 - changelog/20664.txt | 3 - changelog/20680.txt | 6 - changelog/20694.txt | 4 - changelog/20697.txt | 3 - changelog/20725.txt | 3 - changelog/20731.txt | 3 - changelog/20736.txt | 3 - changelog/20741.txt | 3 - changelog/20742.txt | 3 - changelog/20745.txt | 3 - changelog/20747.txt | 3 - changelog/20750.txt | 3 - changelog/20751.txt | 3 - changelog/20752.txt | 3 - changelog/20758.txt | 3 - changelog/20763.txt | 3 - changelog/20764.txt | 3 - changelog/20767.txt | 3 - changelog/20771.txt | 4 - changelog/20777.txt | 3 - changelog/20784.txt | 4 - changelog/20787.txt | 3 - changelog/20799.txt | 3 - changelog/20802.txt | 6 - changelog/20807.txt | 3 - changelog/20816.txt | 3 - changelog/20818.txt | 3 - changelog/20834.txt | 3 - changelog/20882.txt | 6 - changelog/20891.txt | 4 - changelog/20933.txt | 3 - changelog/20934.txt | 3 - changelog/20943.txt | 3 - changelog/20981.txt | 3 - changelog/20995.txt | 3 - changelog/21010.txt | 3 - changelog/_go-ver-1130.txt | 2 +- changelog/_go-ver-1132.txt | 3 + .../{_go-ver-1140.txt => _go-ver-1133.txt} | 0 changelog/pki-ui-improvements.txt | 3 - command/agent.go | 318 +- command/agent/alicloud_end_to_end_test.go | 11 +- command/agent/approle_end_to_end_test.go | 11 +- .../auth/alicloud/alicloud.go | 5 +- .../auth/approle/approle.go | 5 +- .../{agentproxyshared => agent}/auth/auth.go | 104 +- .../auth/auth_test.go | 7 +- .../auth/aws/aws.go | 5 +- .../auth/azure/azure.go | 5 +- .../auth/cert/cert.go | 16 +- .../auth/cert/cert_test.go | 61 +- .../auth/cert/test-fixtures/keys/cert.pem | 0 .../auth/cert/test-fixtures/keys/key.pem | 0 .../auth/cert/test-fixtures/keys/pkioutput | 0 .../auth/cert/test-fixtures/root/pkioutput | 0 .../auth/cert/test-fixtures/root/root.crl | 0 .../cert/test-fixtures/root/rootcacert.pem | 0 .../cert/test-fixtures/root/rootcakey.pem | 0 .../{agentproxyshared => agent}/auth/cf/cf.go | 5 +- .../auth/gcp/gcp.go | 5 +- .../auth/jwt/jwt.go | 92 +- .../auth/jwt/jwt_test.go | 97 +- .../kerberos/integtest/integrationtest.sh | 3 - .../auth/kerberos/kerberos.go | 5 +- .../auth/kerberos/kerberos_test.go | 5 +- .../auth/kubernetes/kubernetes.go | 5 +- .../auth/kubernetes/kubernetes_test.go | 5 +- .../auth/token-file/token_file.go | 5 +- .../auth/token-file/token_file_test.go | 5 +- ...auto_auth_preload_token_end_to_end_test.go | 11 +- command/agent/aws_end_to_end_test.go | 11 +- .../cache/api_proxy.go | 48 +- .../cache/api_proxy_test.go | 29 +- .../cache/cache_test.go | 7 +- .../cache/cacheboltdb/bolt.go | 3 - .../cache/cacheboltdb/bolt_test.go | 5 +- .../cache/cachememdb/cache_memdb.go | 3 - .../cache/cachememdb/cache_memdb_test.go | 3 - .../cache/cachememdb/index.go | 3 - .../cache/cachememdb/index_test.go | 3 - .../cache/handler.go | 5 +- .../cache/keymanager/manager.go | 3 - .../cache/keymanager/passthrough.go | 3 - .../cache/keymanager/passthrough_test.go | 3 - .../cache/lease_cache.go | 27 +- .../cache/lease_cache_test.go | 33 +- .../cache/listener.go | 3 - .../cache/proxy.go | 3 - .../cache/testing.go | 11 +- command/agent/cache_end_to_end_test.go | 22 +- command/agent/cert_end_to_end_test.go | 15 +- command/agent/cf_end_to_end_test.go | 11 +- command/agent/config/config.go | 323 +- command/agent/config/config_test.go | 247 +- .../bad-config-api_proxy-cache.hcl | 3 - ...-auto_auth-nosinks-nocache-notemplates.hcl | 3 - ...onfig-auto_auth-wrapped-multiple-sinks.hcl | 3 - ...config-cache-auto_auth-method-wrapping.hcl | 3 - ...onfig-cache-force-token-no-auth-method.hcl | 3 - 
...ad-config-cache-inconsistent-auto_auth.hcl | 3 - .../bad-config-cache-no-listeners.hcl | 3 - .../bad-config-disable-idle-connections.hcl | 3 - .../bad-config-disable-keep-alives.hcl | 3 - ...-config-env-templates-disalowed-fields.hcl | 33 - ...ad-config-env-templates-invalid-signal.hcl | 26 - .../bad-config-env-templates-missing-exec.hcl | 30 - .../bad-config-env-templates-no-name.hcl | 26 - ...nfig-env-templates-with-file-templates.hcl | 40 - .../bad-config-env-templates-with-proxy.hcl | 47 - ...nfig-method-wrapping-and-sink-wrapping.hcl | 3 - ...i_proxy-auto_auth-all-api_proxy-config.hcl | 3 - .../config-cache-auto_auth-false.hcl | 3 - .../config-cache-auto_auth-force.hcl | 3 - .../config-cache-auto_auth-no-sink.hcl | 3 - .../config-cache-auto_auth-true.hcl | 3 - .../config-cache-embedded-type.hcl | 3 - .../config-cache-no-auto_auth.hcl | 3 - .../config-cache-no-listeners.hcl | 3 - .../config-cache-persist-empty-type.hcl | 3 - .../config-cache-persist-false.hcl | 3 - .../config/test-fixtures/config-cache.hcl | 3 - .../config-consistency-apiproxy.hcl | 3 - .../test-fixtures/config-consistency.hcl | 3 - .../config1.hcl | 3 - .../config2.hcl | 3 - .../config-dir-cache/config-cache1.hcl | 3 - .../config-dir-cache/config-cache2.hcl | 3 - .../config-dir-vault-block/config1.hcl | 3 - .../config-dir-vault-block/config2.hcl | 3 - .../config-disable-idle-connections-all.hcl | 3 - ...fig-disable-idle-connections-auto-auth.hcl | 3 - ...onfig-disable-idle-connections-caching.hcl | 3 - .../config-disable-idle-connections-empty.hcl | 3 - ...nfig-disable-idle-connections-proxying.hcl | 3 - ...ig-disable-idle-connections-templating.hcl | 3 - .../config-disable-keep-alives-all.hcl | 3 - .../config-disable-keep-alives-auto-auth.hcl | 3 - .../config-disable-keep-alives-caching.hcl | 3 - .../config-disable-keep-alives-empty.hcl | 3 - .../config-disable-keep-alives-proxying.hcl | 3 - .../config-disable-keep-alives-templating.hcl | 3 - .../test-fixtures/config-embedded-type.hcl | 3 - .../config-env-templates-complex.hcl | 36 - .../config-env-templates-simple.hcl | 18 - .../config-env-templates-with-source.hcl | 16 - .../config-method-exit-on-err.hcl | 3 - .../config-method-initial-backoff.hcl | 3 - .../test-fixtures/config-method-wrapping.hcl | 3 - .../config-template-full-nosink.hcl | 3 - .../test-fixtures/config-template-full.hcl | 3 - .../config-template-many-nosink.hcl | 3 - .../test-fixtures/config-template-many.hcl | 3 - .../config-template-min-nosink.hcl | 3 - .../test-fixtures/config-template-min.hcl | 3 - .../config-template-with-cache.hcl | 22 - .../config-template_config-empty.hcl | 3 - .../test-fixtures/config-template_config.hcl | 3 - .../config-vault-retry-empty.hcl | 3 - .../test-fixtures/config-vault-retry.hcl | 3 - command/agent/config/test-fixtures/config.hcl | 3 - command/agent/doc.go | 3 - command/agent/exec/exec.go | 332 - command/agent/exec/exec_test.go | 379 - command/agent/exec/test-app/main.go | 150 - .../agent/internal/ctmanager/runner_config.go | 149 - command/agent/jwt_end_to_end_test.go | 92 +- command/agent/oci_end_to_end_test.go | 231 - .../sink/file/file_sink.go | 5 +- .../sink/file/file_sink_test.go | 5 +- .../sink/file/sink_test.go | 5 +- .../sink/inmem/inmem_sink.go | 7 +- .../sink/mock/mock_sink.go | 5 +- .../{agentproxyshared => agent}/sink/sink.go | 3 - command/agent/template/template.go | 149 +- command/agent/template/template_test.go | 15 +- command/agent/testing.go | 11 +- command/agent/token_file_end_to_end_test.go | 11 +- .../winsvc/service.go | 3 - 
.../winsvc/service_windows.go | 3 - command/agent_generate_config.go | 441 - command/agent_generate_config_test.go | 274 - command/agent_test.go | 638 +- command/agentproxyshared/auth/oci/oci.go | 265 - command/agentproxyshared/helpers.go | 237 - command/agentproxyshared/helpers_test.go | 92 - command/approle_concurrency_integ_test.go | 3 - command/audit.go | 3 - command/audit_disable.go | 3 - command/audit_disable_test.go | 3 - command/audit_enable.go | 3 - command/audit_enable_test.go | 3 - command/audit_list.go | 3 - command/audit_list_test.go | 3 - command/auth.go | 3 - command/auth_disable.go | 3 - command/auth_disable_test.go | 3 - command/auth_enable.go | 3 - command/auth_enable_test.go | 3 - command/auth_help.go | 3 - command/auth_help_test.go | 3 - command/auth_list.go | 3 - command/auth_list_test.go | 3 - command/auth_move.go | 3 - command/auth_move_test.go | 3 - command/auth_test.go | 3 - command/auth_tune.go | 3 - command/auth_tune_test.go | 3 - command/base.go | 37 +- command/base_flags.go | 3 - command/base_flags_test.go | 3 - command/base_helpers.go | 3 - command/base_helpers_test.go | 3 - command/base_predict.go | 3 - command/base_predict_test.go | 3 - command/base_test.go | 3 - command/command_test.go | 49 +- command/commands.go | 21 +- command/commands_nonwindows.go | 3 - command/commands_windows.go | 3 - command/config.go | 3 - command/config/config.go | 3 - command/config/config_test.go | 3 - command/config/util.go | 3 - command/config/validate_listener.go | 3 - command/config_test.go | 3 - command/debug.go | 3 - command/debug_test.go | 3 - command/delete.go | 3 - command/delete_test.go | 3 - command/events.go | 3 - command/events_test.go | 3 - command/format.go | 3 - command/format_test.go | 3 - command/healthcheck/healthcheck.go | 3 - command/healthcheck/pki.go | 3 - command/healthcheck/pki_allow_acme_headers.go | 155 - .../pki_allow_if_modified_since.go | 3 - command/healthcheck/pki_audit_visibility.go | 3 - command/healthcheck/pki_ca_validity_period.go | 3 - .../healthcheck/pki_crl_validity_period.go | 3 - .../healthcheck/pki_enable_acme_issuance.go | 237 - command/healthcheck/pki_enable_auto_tidy.go | 3 - .../healthcheck/pki_hardware_backed_root.go | 3 - .../pki_role_allows_glob_wildcards.go | 3 - .../healthcheck/pki_role_allows_localhost.go | 3 - .../healthcheck/pki_role_no_store_false.go | 3 - command/healthcheck/pki_root_issued_leaves.go | 3 - command/healthcheck/pki_tidy_last_run.go | 3 - command/healthcheck/pki_too_many_certs.go | 3 - command/healthcheck/shared.go | 3 - command/healthcheck/util.go | 3 - command/kv.go | 3 - command/kv_delete.go | 9 +- command/kv_destroy.go | 5 +- command/kv_enable_versioning.go | 3 - command/kv_get.go | 5 +- command/kv_helpers.go | 93 +- command/kv_helpers_test.go | 272 - command/kv_list.go | 93 +- command/kv_metadata.go | 3 - command/kv_metadata_delete.go | 5 +- command/kv_metadata_get.go | 5 +- command/kv_metadata_patch.go | 5 +- command/kv_metadata_patch_test.go | 3 - command/kv_metadata_put.go | 5 +- command/kv_metadata_put_test.go | 3 - command/kv_patch.go | 5 +- command/kv_put.go | 5 +- command/kv_rollback.go | 5 +- command/kv_test.go | 128 - command/kv_undelete.go | 5 +- command/lease.go | 3 - command/lease_lookup.go | 3 - command/lease_lookup_test.go | 3 - command/lease_renew.go | 3 - command/lease_renew_test.go | 3 - command/lease_revoke.go | 3 - command/lease_revoke_test.go | 3 - command/list.go | 11 +- command/list_test.go | 3 - command/log_flags.go | 3 - command/log_flags_test.go | 3 - command/login.go | 3 - 
command/login_test.go | 3 - command/main.go | 3 - command/monitor.go | 3 - command/monitor_test.go | 3 - command/namespace.go | 3 - command/namespace_api_lock.go | 3 - command/namespace_api_unlock.go | 3 - command/namespace_create.go | 3 - command/namespace_delete.go | 3 - command/namespace_list.go | 3 - command/namespace_lookup.go | 3 - command/namespace_patch.go | 3 - command/operator.go | 3 - command/operator_diagnose.go | 3 - command/operator_diagnose_test.go | 3 - command/operator_generate_root.go | 3 - command/operator_generate_root_test.go | 3 - command/operator_init.go | 3 - command/operator_init_test.go | 3 - command/operator_key_status.go | 3 - command/operator_key_status_test.go | 3 - command/operator_members.go | 3 - command/operator_migrate.go | 3 - command/operator_migrate_test.go | 3 - command/operator_raft.go | 3 - command/operator_raft_autopilot_get_config.go | 3 - command/operator_raft_autopilot_set_config.go | 3 - command/operator_raft_autopilot_state.go | 3 - command/operator_raft_join.go | 3 - command/operator_raft_listpeers.go | 3 - command/operator_raft_remove_peer.go | 3 - command/operator_raft_snapshot.go | 3 - command/operator_raft_snapshot_restore.go | 3 - command/operator_raft_snapshot_save.go | 3 - command/operator_rekey.go | 3 - command/operator_rekey_test.go | 3 - command/operator_seal.go | 3 - command/operator_seal_test.go | 3 - command/operator_step_down.go | 3 - command/operator_step_down_test.go | 3 - command/operator_unseal.go | 3 - command/operator_unseal_test.go | 3 - command/operator_usage.go | 3 - command/patch.go | 3 - command/patch_test.go | 3 - command/path_help.go | 3 - command/path_help_test.go | 3 - command/pgp_test.go | 3 - command/pki.go | 3 - command/pki_health_check.go | 5 - command/pki_health_check_test.go | 61 +- command/pki_issue_intermediate.go | 3 - command/pki_issue_intermediate_test.go | 3 - command/pki_list_intermediate.go | 3 - command/pki_list_intermediate_test.go | 3 - command/pki_reissue_intermediate.go | 3 - command/pki_reissue_intermediate_test.go | 3 - command/pki_verify_sign.go | 3 - command/pki_verify_sign_test.go | 3 - command/plugin.go | 3 - command/plugin_deregister.go | 3 - command/plugin_deregister_test.go | 3 - command/plugin_info.go | 3 - command/plugin_info_test.go | 3 - command/plugin_list.go | 3 - command/plugin_list_test.go | 3 - command/plugin_register.go | 3 - command/plugin_register_test.go | 3 - command/plugin_reload.go | 3 - command/plugin_reload_status.go | 3 - command/plugin_reload_test.go | 3 - command/plugin_test.go | 3 - command/policy.go | 3 - command/policy_delete.go | 3 - command/policy_delete_test.go | 3 - command/policy_fmt.go | 3 - command/policy_fmt_test.go | 3 - command/policy_list.go | 3 - command/policy_list_test.go | 3 - command/policy_read.go | 3 - command/policy_read_test.go | 3 - command/policy_write.go | 3 - command/policy_write_test.go | 3 - command/print.go | 3 - command/print_token.go | 3 - command/proxy.go | 1116 - command/proxy/config/config.go | 832 - command/proxy/config/config_test.go | 119 - .../config-cache-embedded-type.hcl | 77 - .../config/test-fixtures/config-cache.hcl | 75 - .../proxy/test-fixtures/reload/reload_bar.key | 27 - .../proxy/test-fixtures/reload/reload_bar.pem | 20 - .../proxy/test-fixtures/reload/reload_ca.pem | 20 - .../proxy/test-fixtures/reload/reload_foo.key | 27 - .../proxy/test-fixtures/reload/reload_foo.pem | 20 - command/proxy_test.go | 1254 - command/read.go | 3 - command/read_test.go | 3 - command/rotate.go | 3 - command/rotate_test.go | 3 - 
command/secrets.go | 3 - command/secrets_disable.go | 3 - command/secrets_disable_test.go | 3 - command/secrets_enable.go | 3 - command/secrets_enable_test.go | 3 - command/secrets_list.go | 3 - command/secrets_list_test.go | 3 - command/secrets_move.go | 3 - command/secrets_move_test.go | 3 - command/secrets_tune.go | 3 - command/secrets_tune_test.go | 3 - command/server.go | 277 +- command/server/config.go | 14 - .../config_custom_response_headers_test.go | 3 - command/server/config_oss_test.go | 3 - command/server/config_telemetry_test.go | 3 - command/server/config_test.go | 3 - command/server/config_test_helpers.go | 14 +- command/server/config_test_helpers_util.go | 4 - command/server/config_util.go | 3 - command/server/hcp_link_config_test.go | 3 - command/server/listener.go | 3 - command/server/listener_tcp.go | 3 - command/server/listener_tcp_test.go | 3 - command/server/listener_test.go | 3 - command/server/listener_unix.go | 3 - command/server/listener_unix_test.go | 3 - .../server/server_seal_transit_acc_test.go | 5 +- .../server/test-fixtures/config-dir/baz.hcl | 3 - .../server/test-fixtures/config-dir/foo.hcl | 3 - command/server/test-fixtures/config.hcl | 3 - command/server/test-fixtures/config2.hcl | 3 - command/server/test-fixtures/config3.hcl | 3 - command/server/test-fixtures/config4.hcl | 3 - command/server/test-fixtures/config5.hcl | 3 - .../config_bad_https_storage.hcl | 3 - .../config_custom_response_headers_1.hcl | 3 - ...om_response_headers_multiple_listeners.hcl | 3 - .../config_diagnose_hastorage_bad_https.hcl | 3 - .../test-fixtures/config_diagnose_ok.hcl | 3 - command/server/test-fixtures/config_raft.hcl | 3 - command/server/test-fixtures/config_seals.hcl | 3 - command/server/test-fixtures/config_small.hcl | 3 - .../diagnose_bad_https_consul_sr.hcl | 3 - .../test-fixtures/diagnose_bad_telemetry1.hcl | 3 - .../test-fixtures/diagnose_bad_telemetry2.hcl | 3 - .../test-fixtures/diagnose_bad_telemetry3.hcl | 3 - .../diagnose_ok_storage_direct_access.hcl | 3 - .../diagnose_raft_no_bolt_folder.hcl | 3 - .../diagnose_seal_transit_tls_check.hcl | 3 - .../server/test-fixtures/hcp_link_config.hcl | 3 - .../server/test-fixtures/nostore_config.hcl | 3 - .../server/test-fixtures/raft_retry_join.hcl | 3 - .../telemetry/filter_default_override.hcl | 3 - .../telemetry/valid_prefix_filter.hcl | 3 - .../server/test-fixtures/tls_config_ok.hcl | 3 - .../test-fixtures/unauth_in_flight_access.hcl | 3 - command/server/tls_util.go | 3 - command/server_noprofile.go | 3 - command/server_profile.go | 3 - command/server_test.go | 66 - command/server_util.go | 3 - command/ssh.go | 3 - command/ssh_test.go | 3 - command/status.go | 3 - command/status_test.go | 3 - command/test-fixtures/config.hcl | 3 - command/test-fixtures/policy.hcl | 3 - command/token.go | 3 - command/token/helper.go | 3 - command/token/helper_external.go | 3 - command/token/helper_external_test.go | 3 - command/token/helper_internal.go | 3 - command/token/helper_internal_test.go | 3 - command/token/helper_testing.go | 3 - command/token/testing.go | 3 - command/token_capabilities.go | 3 - command/token_capabilities_test.go | 3 - command/token_create.go | 3 - command/token_create_test.go | 3 - command/token_lookup.go | 3 - command/token_lookup_test.go | 3 - command/token_renew.go | 3 - command/token_renew_test.go | 3 - command/token_revoke.go | 3 - command/token_revoke_test.go | 3 - command/transit.go | 3 - command/transit_import_key.go | 3 - command/transit_import_key_test.go | 3 - command/transit_import_key_version.go | 3 
- command/unwrap.go | 3 - command/unwrap_test.go | 3 - command/util.go | 3 - command/version.go | 3 - command/version_history.go | 3 - command/version_history_test.go | 3 - command/version_test.go | 3 - command/write.go | 3 - command/write_test.go | 3 - enos/ci/aws-nuke.yml | 3 - enos/ci/bootstrap/main.tf | 3 - enos/ci/bootstrap/outputs.tf | 3 - enos/ci/bootstrap/variables.tf | 3 - enos/ci/service-user-iam/main.tf | 3 - enos/ci/service-user-iam/outputs.tf | 3 - enos/ci/service-user-iam/providers.tf | 3 - enos/ci/service-user-iam/service-quotas.tf | 15 +- enos/ci/service-user-iam/variables.tf | 3 - enos/enos-modules.hcl | 3 - enos/enos-providers.hcl | 3 - enos/enos-scenario-agent.hcl | 3 - enos/enos-scenario-autopilot.hcl | 3 - enos/enos-scenario-replication.hcl | 3 - enos/enos-scenario-smoke.hcl | 3 - enos/enos-scenario-ui.hcl | 3 - enos/enos-scenario-upgrade.hcl | 3 - enos/enos-terraform.hcl | 3 - enos/enos-variables.hcl | 3 - enos/enos.vars.hcl | 3 - enos/k8s/enos-modules-k8s.hcl | 3 - enos/k8s/enos-providers-k8s.hcl | 3 - enos/k8s/enos-scenario-k8s.hcl | 3 - enos/k8s/enos-terraform-k8s.hcl | 3 - enos/k8s/enos-variables-k8s.hcl | 3 - .../autopilot_upgrade_storageconfig/main.tf | 3 - enos/modules/az_finder/main.tf | 3 - enos/modules/backend_raft/main.tf | 3 - enos/modules/build_crt/main.tf | 3 - enos/modules/build_local/main.tf | 3 - enos/modules/build_local/scripts/build.sh | 3 - enos/modules/generate_secondary_token/main.tf | 3 - enos/modules/get_local_metadata/main.tf | 3 - .../get_local_metadata/scripts/build_date.sh | 3 - .../get_local_metadata/scripts/version.sh | 3 - enos/modules/k8s_deploy_vault/main.tf | 3 - enos/modules/k8s_deploy_vault/variables.tf | 3 - .../k8s_vault_verify_build_date/main.tf | 3 - .../k8s_vault_verify_build_date/variables.tf | 3 - .../k8s_vault_verify_replication/main.tf | 3 - .../scripts/smoke-verify-replication.sh | 3 - .../k8s_vault_verify_replication/variables.tf | 3 - enos/modules/k8s_vault_verify_ui/main.tf | 3 - .../scripts/smoke-verify-ui.sh | 3 - enos/modules/k8s_vault_verify_ui/variables.tf | 3 - enos/modules/k8s_vault_verify_version/main.tf | 3 - .../scripts/get-status.sh | 3 - .../scripts/smoke-verify-version.sh | 3 - .../k8s_vault_verify_version/variables.tf | 3 - .../k8s_vault_verify_write_data/main.tf | 3 - .../k8s_vault_verify_write_data/variables.tf | 3 - enos/modules/load_docker_image/main.tf | 3 - enos/modules/local_kind_cluster/main.tf | 3 - enos/modules/read_license/main.tf | 3 - enos/modules/shutdown_multiple_nodes/main.tf | 3 - enos/modules/shutdown_node/main.tf | 3 - enos/modules/vault_agent/main.tf | 3 - .../templates/set-up-approle-and-agent.sh | 3 - .../vault_artifactory_artifact/locals.tf | 3 - .../vault_artifactory_artifact/main.tf | 3 - .../vault_artifactory_artifact/outputs.tf | 3 - .../vault_artifactory_artifact/variables.tf | 3 - enos/modules/vault_get_cluster_ips/main.tf | 3 - .../scripts/get-leader-private-ip.sh | 3 - enos/modules/vault_raft_remove_peer/main.tf | 3 - .../templates/raft-remove-peer.sh | 3 - enos/modules/vault_setup_perf_primary/main.tf | 3 - .../scripts/configure-vault-pr-primary.sh | 3 - .../vault_setup_perf_secondary/main.tf | 3 - enos/modules/vault_test_ui/main.tf | 3 - enos/modules/vault_test_ui/outputs.tf | 3 - enos/modules/vault_test_ui/scripts/test_ui.sh | 3 - enos/modules/vault_test_ui/variables.tf | 3 - enos/modules/vault_unseal_nodes/main.tf | 3 - .../vault_unseal_nodes/scripts/unseal-node.sh | 3 - .../scripts/wait-until-sealed.sh | 3 - enos/modules/vault_upgrade/main.tf | 3 - 
.../templates/get-follower-public-ips.sh | 3 - .../templates/get-leader-public-ip.sh | 3 - .../vault_upgrade/templates/restart-vault.sh | 3 - .../modules/vault_verify_agent_output/main.tf | 3 - .../templates/verify-vault-agent-output.sh | 3 - enos/modules/vault_verify_autopilot/main.tf | 3 - .../templates/smoke-verify-autopilot.sh | 3 - .../main.tf | 3 - .../vault_verify_raft_auto_join_voter/main.tf | 3 - .../templates/verify-raft-auto-join-voter.sh | 3 - enos/modules/vault_verify_read_data/main.tf | 3 - .../scripts/verify-data.sh | 3 - enos/modules/vault_verify_replication/main.tf | 3 - .../templates/smoke-verify-replication.sh | 3 - .../vault_verify_replication/variables.tf | 3 - enos/modules/vault_verify_ui/main.tf | 3 - .../templates/smoke-verify-ui.sh | 3 - enos/modules/vault_verify_ui/variables.tf | 3 - enos/modules/vault_verify_undo_logs/main.tf | 3 - .../scripts/smoke-verify-undo-logs.sh | 3 - enos/modules/vault_verify_unsealed/main.tf | 3 - .../templates/verify-vault-node-unsealed.sh | 3 - enos/modules/vault_verify_version/main.tf | 3 - .../templates/verify-cluster-version.sh | 3 - enos/modules/vault_verify_write_data/main.tf | 3 - .../scripts/smoke-enable-secrets-kv.sh | 3 - .../scripts/smoke-write-test-data.sh | 3 - go.mod | 393 +- go.sum | 1978 +- helper/benchhelpers/benchhelpers.go | 3 - helper/builtinplugins/registry.go | 3 - helper/builtinplugins/registry_test.go | 114 - helper/constants/constants_oss.go | 3 - helper/constants/fips.go | 3 - helper/constants/fips_build_check.go | 3 - helper/constants/fips_cgo_check.go | 3 - helper/dhutil/dhutil.go | 3 - helper/dhutil/dhutil_test.go | 3 - helper/experiments/experiments.go | 3 - helper/fairshare/fairshare_testing_util.go | 3 - helper/fairshare/jobmanager.go | 3 - helper/fairshare/jobmanager_test.go | 3 - helper/fairshare/workerpool.go | 3 - helper/fairshare/workerpool_test.go | 3 - helper/flag-kv/flag.go | 3 - helper/flag-kv/flag_test.go | 3 - helper/flag-slice/flag.go | 3 - helper/flag-slice/flag_test.go | 3 - helper/forwarding/types.pb.go | 3 - helper/forwarding/types.proto | 3 - helper/forwarding/util.go | 3 - helper/forwarding/util_test.go | 3 - helper/hostutil/hostinfo.go | 3 - helper/hostutil/hostinfo_error.go | 3 - helper/hostutil/hostinfo_openbsd.go | 3 - helper/hostutil/hostinfo_test.go | 3 - helper/identity/identity.go | 3 - helper/identity/mfa/mfa.go | 3 - helper/identity/mfa/sentinel.go | 3 - helper/identity/mfa/types.pb.go | 3 - helper/identity/mfa/types.proto | 3 - helper/identity/sentinel.go | 3 - helper/identity/types.pb.go | 3 - helper/identity/types.proto | 3 - helper/locking/lock.go | 3 - helper/logging/logfile.go | 3 - helper/logging/logfile_test.go | 3 - helper/logging/logger.go | 3 - helper/logging/logger_test.go | 3 - helper/metricsutil/bucket.go | 3 - helper/metricsutil/bucket_test.go | 3 - helper/metricsutil/gauge_process.go | 28 +- helper/metricsutil/gauge_process_test.go | 9 +- helper/metricsutil/metricsutil.go | 3 - helper/metricsutil/metricsutil_test.go | 3 - helper/metricsutil/wrapped_metrics.go | 3 - helper/metricsutil/wrapped_metrics_test.go | 3 - helper/monitor/monitor.go | 3 - helper/monitor/monitor_test.go | 3 - helper/namespace/namespace.go | 3 - helper/namespace/namespace_test.go | 3 - helper/osutil/fileinfo.go | 3 - helper/osutil/fileinfo_test.go | 3 - helper/osutil/fileinfo_unix.go | 3 - helper/osutil/fileinfo_unix_test.go | 3 - helper/osutil/fileinfo_windows.go | 3 - helper/parseip/parseip.go | 3 - helper/parseip/parseip_test.go | 3 - helper/pgpkeys/encrypt_decrypt.go | 3 - 
helper/pgpkeys/flag.go | 3 - helper/pgpkeys/flag_test.go | 3 - helper/pgpkeys/keybase.go | 3 - helper/pgpkeys/keybase_test.go | 3 - helper/pgpkeys/test_keys.go | 3 - helper/policies/policies.go | 3 - helper/policies/policies_test.go | 3 - helper/proxyutil/proxyutil.go | 3 - helper/random/parser.go | 3 - helper/random/parser_test.go | 3 - helper/random/random_api.go | 3 - helper/random/registry.go | 3 - helper/random/registry_test.go | 3 - helper/random/rules.go | 3 - helper/random/rules_test.go | 3 - helper/random/serializing.go | 3 - helper/random/serializing_test.go | 3 - helper/random/string_generator.go | 3 - helper/random/string_generator_test.go | 3 - helper/storagepacker/storagepacker.go | 3 - helper/storagepacker/storagepacker_test.go | 3 - helper/storagepacker/types.pb.go | 3 - helper/storagepacker/types.proto | 3 - helper/testhelpers/azurite/azurite.go | 5 +- .../testhelpers/cassandra/cassandrahelper.go | 13 +- .../testhelpers/certhelpers/cert_helpers.go | 3 - helper/testhelpers/consul/consulhelper.go | 5 +- helper/testhelpers/corehelpers/corehelpers.go | 3 - .../testhelpers}/docker/testhelpers.go | 304 +- helper/testhelpers/etcd/etcdhelper.go | 5 +- .../fakegcsserver/fake-gcs-server.go | 5 +- helper/testhelpers/ldap/ldaphelper.go | 12 +- helper/testhelpers/logical/testing.go | 3 - helper/testhelpers/logical/testing_test.go | 3 - helper/testhelpers/minimal/minimal.go | 81 - helper/testhelpers/minio/miniohelper.go | 5 +- helper/testhelpers/mongodb/mongodbhelper.go | 5 +- helper/testhelpers/mssql/mssqlhelper.go | 11 +- helper/testhelpers/mysql/mysqlhelper.go | 12 +- .../pluginhelpers/pluginhelpers.go | 9 +- .../postgresql/postgresqlhelper.go | 25 +- helper/testhelpers/seal/sealhelper.go | 7 +- helper/testhelpers/testhelpers.go | 9 - helper/testhelpers/testhelpers_oss.go | 3 - .../testhelpers/teststorage/consul/consul.go | 3 - helper/testhelpers/teststorage/teststorage.go | 3 - .../teststorage/teststorage_reusable.go | 3 - helper/timeutil/timeutil.go | 26 - helper/timeutil/timeutil_test.go | 3 - helper/useragent/useragent.go | 79 - helper/useragent/useragent_test.go | 128 +- helper/versions/version.go | 3 - helper/versions/version_test.go | 3 - http/assets.go | 3 - http/assets_stub.go | 3 - http/auth_token_test.go | 3 - http/cors.go | 3 - http/custom_header_test.go | 3 - http/events.go | 3 - http/events_test.go | 3 - http/forwarded_for_test.go | 3 - http/forwarding_bench_test.go | 3 - http/forwarding_test.go | 3 - http/handler.go | 3 - http/handler_test.go | 3 - http/help.go | 3 - http/help_test.go | 3 - http/http_test.go | 3 - http/logical.go | 9 +- http/logical_test.go | 181 - http/plugin_test.go | 3 - http/sys_audit_test.go | 3 - http/sys_auth_test.go | 3 - http/sys_config_cors_test.go | 3 - http/sys_config_state_test.go | 3 - http/sys_feature_flags.go | 3 - http/sys_generate_root.go | 3 - http/sys_generate_root_test.go | 3 - http/sys_health.go | 3 - http/sys_health_test.go | 3 - http/sys_hostinfo_test.go | 3 - http/sys_in_flight_requests.go | 3 - http/sys_in_flight_requests_test.go | 3 - http/sys_init.go | 3 - http/sys_init_test.go | 6 +- http/sys_internal_test.go | 3 - http/sys_leader.go | 3 - http/sys_leader_test.go | 3 - http/sys_lease_test.go | 3 - http/sys_metrics.go | 3 - http/sys_metrics_test.go | 3 - http/sys_monitor_test.go | 3 - http/sys_mount_test.go | 3 - http/sys_mounts_test.go | 3 - http/sys_policy_test.go | 3 - http/sys_raft.go | 3 - http/sys_rekey.go | 3 - http/sys_rekey_test.go | 100 +- http/sys_rotate_test.go | 3 - http/sys_seal.go | 3 - http/sys_seal_test.go | 
77 +- http/sys_wrapping_test.go | 3 - http/testing.go | 3 - http/unwrapping_raw_body_test.go | 3 - http/util.go | 3 - internal/go118_sha1_patch.go | 3 - internalshared/configutil/config.go | 3 - internalshared/configutil/config_util.go | 3 - internalshared/configutil/encrypt_decrypt.go | 3 - .../configutil/encrypt_decrypt_test.go | 3 - internalshared/configutil/hcp_link.go | 3 - .../configutil/http_response_headers.go | 3 - internalshared/configutil/kms.go | 3 - internalshared/configutil/lint.go | 3 - internalshared/configutil/listener.go | 10 - internalshared/configutil/listener_test.go | 3 - internalshared/configutil/merge.go | 3 - internalshared/configutil/telemetry.go | 3 - internalshared/configutil/telemetry_test.go | 3 - internalshared/configutil/userlockout.go | 3 - internalshared/configutil/userlockout_test.go | 3 - internalshared/listenerutil/bufconn.go | 3 - internalshared/listenerutil/listener.go | 3 - internalshared/listenerutil/listener_test.go | 3 - main.go | 3 - main_test.go | 3 - physical/aerospike/aerospike.go | 3 - physical/aerospike/aerospike_test.go | 12 +- physical/alicloudoss/alicloudoss.go | 3 - physical/alicloudoss/alicloudoss_test.go | 3 - physical/azure/azure.go | 3 - physical/azure/azure_test.go | 3 - physical/cassandra/cassandra.go | 3 - physical/cassandra/cassandra_test.go | 3 - physical/cockroachdb/cockroachdb.go | 3 - physical/cockroachdb/cockroachdb_ha.go | 3 - physical/cockroachdb/cockroachdb_test.go | 12 +- physical/cockroachdb/keywords.go | 3 - physical/consul/consul.go | 3 - physical/consul/consul_test.go | 3 - physical/consul/helpers.go | 3 - physical/couchdb/couchdb.go | 3 - physical/couchdb/couchdb_test.go | 13 +- physical/dynamodb/dynamodb.go | 3 - physical/dynamodb/dynamodb_test.go | 12 +- physical/etcd/etcd.go | 3 - physical/etcd/etcd3.go | 3 - physical/etcd/etcd3_test.go | 3 - physical/foundationdb/fdb-go-install.sh | 3 - physical/foundationdb/foundationdb.go | 3 - physical/foundationdb/foundationdb_test.go | 3 - physical/foundationdb/foundationdbstub.go | 3 - physical/gcs/gcs.go | 3 - physical/gcs/gcs_ha.go | 3 - physical/gcs/gcs_ha_test.go | 3 - physical/gcs/gcs_test.go | 3 - physical/manta/manta.go | 3 - physical/manta/manta_test.go | 3 - physical/mssql/mssql.go | 3 - physical/mssql/mssql_test.go | 3 - physical/mysql/mysql.go | 3 - physical/mysql/mysql_test.go | 3 - physical/postgresql/postgresql.go | 3 - physical/postgresql/postgresql_test.go | 3 - physical/raft/bolt_32bit_test.go | 3 - physical/raft/bolt_64bit_test.go | 3 - physical/raft/bolt_linux.go | 3 - physical/raft/chunking_test.go | 3 - physical/raft/fsm.go | 3 - physical/raft/fsm_test.go | 3 - physical/raft/msgpack.go | 3 - physical/raft/raft.go | 11 +- physical/raft/raft_autopilot.go | 3 - physical/raft/raft_test.go | 3 - physical/raft/raft_util.go | 3 - physical/raft/snapshot.go | 3 - physical/raft/snapshot_test.go | 3 - physical/raft/streamlayer.go | 3 - physical/raft/streamlayer_test.go | 3 - physical/raft/testing.go | 3 - physical/raft/types.pb.go | 3 - physical/raft/types.proto | 3 - physical/raft/vars_32bit.go | 3 - physical/raft/vars_64bit.go | 3 - physical/s3/s3.go | 3 - physical/s3/s3_test.go | 3 - physical/spanner/spanner.go | 3 - physical/spanner/spanner_ha.go | 3 - physical/spanner/spanner_ha_test.go | 3 - physical/spanner/spanner_test.go | 3 - physical/swift/swift.go | 3 - physical/swift/swift_test.go | 3 - physical/zookeeper/zookeeper.go | 3 - physical/zookeeper/zookeeper_test.go | 3 - .../cassandra-database-plugin/main.go | 3 - plugins/database/cassandra/cassandra.go | 
3 - plugins/database/cassandra/cassandra_test.go | 3 - .../database/cassandra/connection_producer.go | 3 - .../cassandra/connection_producer_test.go | 3 - .../test-fixtures/no_tls/cassandra.yaml | 3 - plugins/database/cassandra/tls.go | 3 - .../hana/hana-database-plugin/main.go | 3 - plugins/database/hana/hana.go | 3 - plugins/database/hana/hana_test.go | 3 - .../database/influxdb/connection_producer.go | 3 - .../influxdb/influxdb-database-plugin/main.go | 3 - plugins/database/influxdb/influxdb.go | 3 - plugins/database/influxdb/influxdb_test.go | 18 +- plugins/database/mongodb/cert_helpers_test.go | 3 - .../database/mongodb/connection_producer.go | 3 - .../mongodb/connection_producer_test.go | 3 - .../mongodb/mongodb-database-plugin/main.go | 3 - plugins/database/mongodb/mongodb.go | 3 - plugins/database/mongodb/mongodb_test.go | 6 - plugins/database/mongodb/util.go | 3 - .../mssql/mssql-database-plugin/main.go | 3 - plugins/database/mssql/mssql.go | 3 - plugins/database/mssql/mssql_test.go | 3 - plugins/database/mysql/connection_producer.go | 3 - .../mysql/connection_producer_test.go | 3 - .../mysql/mysql-database-plugin/main.go | 3 - .../mysql-legacy-database-plugin/main.go | 3 - plugins/database/mysql/mysql.go | 3 - plugins/database/mysql/mysql_test.go | 3 - .../postgresql/passwordauthentication.go | 25 - .../postgresql-database-plugin/main.go | 3 - plugins/database/postgresql/postgresql.go | 60 +- .../database/postgresql/postgresql_test.go | 97 +- plugins/database/postgresql/scram/LICENSE | 21 - plugins/database/postgresql/scram/scram.go | 86 - .../database/postgresql/scram/scram_test.go | 27 - .../redshift/redshift-database-plugin/main.go | 3 - plugins/database/redshift/redshift.go | 3 - plugins/database/redshift/redshift_test.go | 3 - scan.hcl | 3 - scripts/assetcheck.sh | 3 - scripts/build.sh | 11 +- scripts/ci-helper.sh | 7 +- scripts/coverage.sh | 3 - scripts/cross/Dockerfile | 5 +- scripts/deprecations-checker.sh | 52 - scripts/deps_upgrade.py | 3 - scripts/dist.sh | 3 - scripts/docker/Dockerfile | 3 - scripts/docker/Dockerfile.ui | 2 +- scripts/docker/docker-entrypoint.sh | 3 - scripts/gen_openapi.sh | 133 +- scripts/gofmtcheck.sh | 3 - scripts/goversioncheck.sh | 3 - scripts/protocversioncheck.sh | 3 - scripts/semgrep_plugin_repos.sh | 3 - scripts/testciphers.sh | 3 - scripts/update_deps.sh | 3 - scripts/update_plugin_modules.sh | 3 - sdk/database/dbplugin/client.go | 3 - sdk/database/dbplugin/database.pb.go | 3 - sdk/database/dbplugin/database.proto | 3 - sdk/database/dbplugin/databasemiddleware.go | 3 - sdk/database/dbplugin/grpc_transport.go | 3 - sdk/database/dbplugin/plugin.go | 3 - sdk/database/dbplugin/server.go | 3 - sdk/database/dbplugin/v5/conversions_test.go | 4 - sdk/database/dbplugin/v5/database.go | 10 - sdk/database/dbplugin/v5/grpc_client.go | 8 - sdk/database/dbplugin/v5/grpc_client_test.go | 3 - .../dbplugin/v5/grpc_database_plugin.go | 3 - sdk/database/dbplugin/v5/grpc_server.go | 4 - sdk/database/dbplugin/v5/grpc_server_test.go | 3 - sdk/database/dbplugin/v5/marshalling.go | 3 - sdk/database/dbplugin/v5/middleware.go | 3 - sdk/database/dbplugin/v5/middleware_test.go | 3 - sdk/database/dbplugin/v5/plugin_client.go | 3 - .../dbplugin/v5/plugin_client_test.go | 3 - sdk/database/dbplugin/v5/plugin_factory.go | 3 - sdk/database/dbplugin/v5/plugin_server.go | 3 - sdk/database/dbplugin/v5/proto/database.pb.go | 199 +- sdk/database/dbplugin/v5/proto/database.proto | 4 - .../dbplugin/v5/testing/test_helpers.go | 3 - sdk/database/helper/connutil/connutil.go | 3 - 
sdk/database/helper/connutil/sql.go | 3 - sdk/database/helper/connutil/sql_test.go | 3 - sdk/database/helper/credsutil/credsutil.go | 3 - .../helper/credsutil/credsutil_test.go | 3 - sdk/database/helper/credsutil/sql.go | 3 - sdk/database/helper/credsutil/usernames.go | 3 - .../helper/credsutil/usernames_test.go | 3 - sdk/database/helper/dbutil/dbutil.go | 3 - sdk/database/helper/dbutil/dbutil_test.go | 3 - sdk/framework/backend.go | 5 - sdk/framework/backend_test.go | 24 - sdk/framework/field_data.go | 3 - sdk/framework/field_data_test.go | 3 - sdk/framework/field_type.go | 11 +- sdk/framework/filter.go | 3 - sdk/framework/identity.go | 3 - sdk/framework/identity_test.go | 3 - sdk/framework/lease.go | 3 - sdk/framework/lease_test.go | 3 - sdk/framework/openapi.go | 203 +- sdk/framework/openapi_test.go | 273 +- sdk/framework/path.go | 32 - sdk/framework/path_map.go | 3 - sdk/framework/path_map_test.go | 3 - sdk/framework/path_struct.go | 3 - sdk/framework/path_struct_test.go | 3 - sdk/framework/path_test.go | 3 - sdk/framework/policy_map.go | 3 - sdk/framework/policy_map_test.go | 3 - sdk/framework/secret.go | 3 - sdk/framework/secret_test.go | 3 - sdk/framework/template.go | 3 - sdk/framework/testdata/legacy.json | 17 +- sdk/framework/testdata/operations.json | 34 +- sdk/framework/testdata/operations_list.json | 13 +- sdk/framework/testdata/responses.json | 17 +- sdk/framework/testing.go | 3 - sdk/framework/wal.go | 3 - sdk/framework/wal_test.go | 3 - sdk/go.mod | 105 +- sdk/go.sum | 331 +- sdk/helper/authmetadata/auth_metadata.go | 3 - .../authmetadata/auth_metadata_acc_test.go | 3 - sdk/helper/authmetadata/auth_metadata_test.go | 3 - sdk/helper/base62/base62.go | 3 - sdk/helper/certutil/certutil_test.go | 64 - sdk/helper/certutil/helpers.go | 76 +- sdk/helper/certutil/types.go | 3 - sdk/helper/cidrutil/cidr.go | 3 - sdk/helper/cidrutil/cidr_test.go | 3 - sdk/helper/compressutil/compress.go | 3 - sdk/helper/compressutil/compress_test.go | 3 - sdk/helper/consts/agent.go | 3 - sdk/helper/consts/consts.go | 3 - sdk/helper/consts/deprecation_status.go | 3 - sdk/helper/consts/error.go | 3 - sdk/helper/consts/plugin_types.go | 3 - sdk/helper/consts/proxy.go | 15 - sdk/helper/consts/replication.go | 3 - sdk/helper/consts/token_consts.go | 3 - sdk/helper/cryptoutil/cryptoutil.go | 3 - sdk/helper/cryptoutil/cryptoutil_test.go | 3 - sdk/helper/custommetadata/custom_metadata.go | 3 - .../custommetadata/custom_metadata_test.go | 3 - sdk/helper/dbtxn/dbtxn.go | 3 - sdk/helper/errutil/error.go | 3 - sdk/helper/hclutil/hcl.go | 3 - sdk/helper/identitytpl/templating.go | 3 - sdk/helper/identitytpl/templating_test.go | 3 - sdk/helper/jsonutil/json.go | 3 - sdk/helper/jsonutil/json_test.go | 3 - sdk/helper/kdf/kdf.go | 3 - sdk/helper/kdf/kdf_test.go | 3 - sdk/helper/keysutil/cache.go | 3 - sdk/helper/keysutil/consts.go | 3 - sdk/helper/keysutil/encrypted_key_storage.go | 3 - .../keysutil/encrypted_key_storage_test.go | 3 - sdk/helper/keysutil/lock_manager.go | 8 +- sdk/helper/keysutil/managed_key_util.go | 3 - sdk/helper/keysutil/policy.go | 509 +- sdk/helper/keysutil/policy_test.go | 5 +- sdk/helper/keysutil/transit_lru.go | 3 - sdk/helper/keysutil/transit_syncmap.go | 3 - sdk/helper/keysutil/util.go | 36 - sdk/helper/ldaputil/client.go | 57 +- sdk/helper/ldaputil/client_test.go | 3 - sdk/helper/ldaputil/config.go | 25 - sdk/helper/ldaputil/config_test.go | 4 - sdk/helper/ldaputil/connection.go | 3 - sdk/helper/ldaputil/ldap.go | 3 - sdk/helper/license/feature.go | 3 - sdk/helper/locksutil/locks.go | 3 - 
sdk/helper/locksutil/locks_test.go | 3 - sdk/helper/logging/logging.go | 3 - sdk/helper/logging/logging_test.go | 3 - sdk/helper/mlock/mlock.go | 3 - sdk/helper/ocsp/ocsp_test.go | 167 + sdk/helper/parseutil/parseutil.go | 3 - sdk/helper/password/password.go | 3 - sdk/helper/pathmanager/pathmanager.go | 3 - sdk/helper/pathmanager/pathmanager_test.go | 3 - sdk/helper/pluginutil/env.go | 3 - sdk/helper/pluginutil/env_test.go | 3 - sdk/helper/pluginutil/multiplexing.go | 3 - sdk/helper/pluginutil/multiplexing.pb.go | 3 - sdk/helper/pluginutil/multiplexing.proto | 3 - sdk/helper/pluginutil/multiplexing_test.go | 3 - sdk/helper/pluginutil/run_config.go | 3 - sdk/helper/pluginutil/run_config_test.go | 3 - sdk/helper/pluginutil/runner.go | 3 - sdk/helper/pluginutil/tls.go | 3 - sdk/helper/pointerutil/pointer.go | 3 - sdk/helper/policyutil/policyutil.go | 3 - sdk/helper/policyutil/policyutil_test.go | 3 - sdk/helper/roottoken/decode.go | 3 - sdk/helper/roottoken/encode.go | 3 - sdk/helper/roottoken/encode_test.go | 3 - sdk/helper/roottoken/otp.go | 3 - sdk/helper/roottoken/otp_test.go | 3 - sdk/helper/salt/salt.go | 3 - sdk/helper/salt/salt_test.go | 3 - sdk/helper/strutil/strutil.go | 3 - sdk/helper/template/funcs.go | 3 - sdk/helper/template/funcs_test.go | 3 - sdk/helper/template/template.go | 3 - sdk/helper/template/template_test.go | 3 - sdk/helper/testcluster/consts.go | 12 - sdk/helper/testcluster/docker/cert.go | 88 - sdk/helper/testcluster/docker/environment.go | 1062 - sdk/helper/testcluster/docker/replication.go | 71 - sdk/helper/testcluster/exec.go | 324 - sdk/helper/testcluster/logging.go | 37 - sdk/helper/testcluster/replication.go | 905 - sdk/helper/testcluster/types.go | 111 - sdk/helper/testcluster/util.go | 356 - sdk/helper/testhelpers/output.go | 81 - sdk/helper/testhelpers/output_test.go | 45 - .../testhelpers/schema/response_validation.go | 96 +- .../schema/response_validation_test.go | 87 - sdk/helper/tlsutil/tlsutil.go | 3 - sdk/helper/tokenutil/tokenutil.go | 13 +- sdk/helper/useragent/useragent.go | 3 - sdk/helper/useragent/useragent_test.go | 3 - sdk/helper/wrapping/wrapinfo.go | 3 - sdk/helper/xor/xor.go | 3 - sdk/helper/xor/xor_test.go | 3 - sdk/logical/acme_billing.go | 10 - sdk/logical/audit.go | 3 - sdk/logical/auth.go | 3 - sdk/logical/connection.go | 3 - sdk/logical/controlgroup.go | 3 - sdk/logical/error.go | 3 - sdk/logical/event.pb.go | 3 - sdk/logical/event.proto | 3 - sdk/logical/events.go | 3 - sdk/logical/identity.pb.go | 3 - sdk/logical/identity.proto | 3 - sdk/logical/lease.go | 3 - sdk/logical/lease_test.go | 3 - sdk/logical/logical.go | 8 +- sdk/logical/logical_storage.go | 3 - sdk/logical/managed_key.go | 3 - sdk/logical/plugin.pb.go | 3 - sdk/logical/plugin.proto | 3 - sdk/logical/request.go | 55 - sdk/logical/response.go | 3 - sdk/logical/response_util.go | 5 +- sdk/logical/response_util_test.go | 11 - sdk/logical/secret.go | 3 - sdk/logical/storage.go | 3 - sdk/logical/storage_inmem.go | 3 - sdk/logical/storage_inmem_test.go | 3 - sdk/logical/storage_test.go | 3 - sdk/logical/storage_view.go | 3 - sdk/logical/system_view.go | 3 - sdk/logical/testing.go | 3 - sdk/logical/token.go | 3 - sdk/logical/token_test.go | 3 - sdk/logical/translate_response.go | 3 - sdk/logical/version.pb.go | 3 - sdk/logical/version.proto | 3 - sdk/physical/cache.go | 3 - sdk/physical/encoding.go | 3 - sdk/physical/entry.go | 3 - sdk/physical/error.go | 3 - sdk/physical/file/file.go | 3 - sdk/physical/file/file_test.go | 3 - sdk/physical/inmem/cache_test.go | 3 - 
sdk/physical/inmem/inmem.go | 3 - sdk/physical/inmem/inmem_ha.go | 3 - sdk/physical/inmem/inmem_ha_test.go | 3 - sdk/physical/inmem/inmem_test.go | 3 - sdk/physical/inmem/physical_view_test.go | 3 - sdk/physical/inmem/transactions_test.go | 3 - sdk/physical/latency.go | 3 - sdk/physical/physical.go | 3 - sdk/physical/physical_access.go | 3 - sdk/physical/physical_view.go | 3 - sdk/physical/testing.go | 3 - sdk/physical/transactions.go | 3 - sdk/plugin/backend.go | 3 - sdk/plugin/grpc_backend.go | 3 - sdk/plugin/grpc_backend_client.go | 3 - sdk/plugin/grpc_backend_server.go | 3 - sdk/plugin/grpc_backend_test.go | 3 - sdk/plugin/grpc_events.go | 3 - sdk/plugin/grpc_storage.go | 3 - sdk/plugin/grpc_system.go | 3 - sdk/plugin/grpc_system_test.go | 3 - sdk/plugin/logger.go | 3 - sdk/plugin/logger_test.go | 3 - sdk/plugin/middleware.go | 3 - sdk/plugin/mock/backend.go | 3 - sdk/plugin/mock/backend_test.go | 3 - sdk/plugin/mock/path_errors.go | 3 - sdk/plugin/mock/path_internal.go | 3 - sdk/plugin/mock/path_kv.go | 3 - sdk/plugin/mock/path_raw.go | 3 - sdk/plugin/mock/path_special.go | 3 - sdk/plugin/pb/backend.pb.go | 3 - sdk/plugin/pb/backend.proto | 3 - sdk/plugin/pb/translation.go | 3 - sdk/plugin/pb/translation_test.go | 3 - sdk/plugin/plugin.go | 15 +- sdk/plugin/plugin_v5.go | 8 +- sdk/plugin/serve.go | 3 - sdk/plugin/storage_test.go | 3 - sdk/queue/priority_queue.go | 3 - sdk/queue/priority_queue_test.go | 3 - .../consul/consul_service_registration.go | 3 - .../consul_service_registration_test.go | 3 - .../kubernetes/client/client.go | 3 - .../kubernetes/client/client_test.go | 3 - .../kubernetes/client/cmd/kubeclient/main.go | 3 - .../kubernetes/client/config.go | 3 - .../kubernetes/retry_handler.go | 3 - .../kubernetes/retry_handler_test.go | 3 - .../kubernetes/service_registration.go | 3 - .../kubernetes/service_registration_test.go | 3 - .../kubernetes/testing/testserver.go | 3 - serviceregistration/service_registration.go | 3 - shamir/shamir.go | 3 - shamir/shamir_test.go | 3 - tools/godoctests/main.go | 3 - tools/godoctests/pkg/analyzer/analyzer.go | 3 - .../godoctests/pkg/analyzer/analyzer_test.go | 3 - .../godoctests/pkg/analyzer/testdata/funcs.go | 3 - tools/gonilnilfunctions/main.go | 13 - .../pkg/analyzer/analyzer.go | 171 - .../pkg/analyzer/analyzer_test.go | 23 - .../pkg/analyzer/testdata/funcs.go | 73 - tools/semgrep/ci/atomic.yml | 3 - tools/semgrep/ci/bad-multierror-append.yml | 3 - tools/semgrep/ci/bad-nil-guard.yml | 3 - tools/semgrep/ci/error-shadowing.yml | 3 - tools/semgrep/ci/fmt-printf.yml | 3 - tools/semgrep/ci/hashsum.yml | 3 - tools/semgrep/ci/hmac-bytes.yml | 3 - tools/semgrep/ci/hmac-hash.yml | 3 - tools/semgrep/ci/logger-format-string.yml | 3 - tools/semgrep/ci/loop-time-after.yml | 3 - tools/semgrep/ci/loopclosure.yml | 3 - tools/semgrep/ci/no-nil-check.yml | 3 - tools/semgrep/ci/oddifsequence.yml | 3 - tools/semgrep/ci/return-nil-error.yml | 3 - tools/semgrep/ci/return-nil.yml | 3 - tools/semgrep/ci/wrongerrcall.yml | 3 - tools/semgrep/ci/wronglock.yml | 3 - tools/semgrep/hostport.yml | 3 - tools/semgrep/joinpath.yml | 3 - tools/semgrep/lock-not-unlocked-on-return.yml | 3 - tools/semgrep/logger-sprintf.yml | 3 - .../paths-with-callbacks-and-operations.yml | 3 - tools/semgrep/paths-with-callbacks.yml | 3 - tools/semgrep/physical-storage.yml | 3 - tools/semgrep/replication-has-state.yml | 3 - tools/semgrep/self-equals.yml | 3 - tools/tools.go | 6 - ui/.eslintrc.js | 5 - ui/.github/workflows/ci.yml | 45 + ui/.gitignore | 9 - ui/.nvmrc | 1 + ui/.prettierrc.js | 
5 - ui/.template-lintrc.js | 5 - ui/.yarn/releases/yarn-1.19.1.js | 147216 ++++++++++++ ui/.yarn/releases/yarn-1.22.19.js | 175346 +++++++++++++++ ui/.yarn/releases/yarn-3.5.0.cjs | 873 - ui/.yarnrc.yml | 3 - ui/README.md | 144 +- ui/app/adapters/application.js | 5 - ui/app/adapters/auth-config/_base.js | 5 - ui/app/adapters/auth-config/aws/client.js | 5 - .../auth-config/aws/identity-accesslist.js | 5 - .../auth-config/aws/roletag-denylist.js | 5 - ui/app/adapters/auth-config/azure.js | 5 - ui/app/adapters/auth-config/gcp.js | 5 - ui/app/adapters/auth-config/github.js | 5 - ui/app/adapters/auth-config/jwt.js | 5 - ui/app/adapters/auth-config/kubernetes.js | 5 - ui/app/adapters/auth-config/ldap.js | 5 - ui/app/adapters/auth-config/oidc.js | 5 - ui/app/adapters/auth-config/okta.js | 5 - ui/app/adapters/auth-config/radius.js | 5 - ui/app/adapters/auth-method.js | 5 - ui/app/adapters/aws-credential.js | 5 - ui/app/adapters/capabilities.js | 5 - ui/app/adapters/clients/activity.js | 5 - ui/app/adapters/clients/config.js | 5 - ui/app/adapters/clients/version-history.js | 5 - ui/app/adapters/cluster.js | 5 - ui/app/adapters/console.js | 5 - ui/app/adapters/control-group-config.js | 5 - ui/app/adapters/control-group.js | 5 - ui/app/adapters/database/connection.js | 5 - ui/app/adapters/database/credential.js | 5 - ui/app/adapters/database/role.js | 5 - ui/app/adapters/generated-item-list.js | 5 - ui/app/adapters/identity/base.js | 5 - ui/app/adapters/identity/entity-alias.js | 5 - ui/app/adapters/identity/entity-merge.js | 5 - ui/app/adapters/identity/entity.js | 5 - ui/app/adapters/identity/group-alias.js | 5 - ui/app/adapters/identity/group.js | 5 - ui/app/adapters/keymgmt/key.js | 5 - ui/app/adapters/keymgmt/provider.js | 5 - ui/app/adapters/kmip/base.js | 5 - ui/app/adapters/kmip/ca.js | 5 - ui/app/adapters/kmip/config.js | 5 - ui/app/adapters/kmip/credential.js | 5 - ui/app/adapters/kmip/role.js | 5 - ui/app/adapters/kmip/scope.js | 5 - ui/app/adapters/kubernetes/config.js | 5 - ui/app/adapters/kubernetes/role.js | 5 - ui/app/adapters/lease.js | 5 - ui/app/adapters/license.js | 5 - ui/app/adapters/mfa-login-enforcement.js | 5 - ui/app/adapters/mfa-method.js | 5 - ui/app/adapters/mfa-setup.js | 5 - ui/app/adapters/named-path.js | 5 - ui/app/adapters/namespace.js | 5 - ui/app/adapters/node.js | 5 - ui/app/adapters/oidc/assignment.js | 5 - ui/app/adapters/oidc/client.js | 5 - ui/app/adapters/oidc/key.js | 5 - ui/app/adapters/oidc/provider.js | 5 - ui/app/adapters/oidc/scope.js | 5 - ui/app/adapters/path-filter-config.js | 5 - ui/app/adapters/permissions.js | 5 - ui/app/adapters/pki-ca-certificate-sign.js | 8 + ui/app/adapters/pki-ca-certificate.js | 68 + ui/app/adapters/pki-certificate-sign.js | 14 + ui/app/adapters/pki.js | 22 + ui/app/adapters/pki/action.js | 7 - ui/app/adapters/pki/cert.js | 5 - ui/app/adapters/pki/certificate/base.js | 5 - ui/app/adapters/pki/certificate/generate.js | 5 - ui/app/adapters/pki/certificate/sign.js | 5 - ui/app/adapters/pki/config/acme.js | 15 - ui/app/adapters/pki/config/base.js | 21 - ui/app/adapters/pki/config/cluster.js | 15 - ui/app/adapters/pki/config/crl.js | 15 - ui/app/adapters/pki/config/urls.js | 15 - ui/app/adapters/pki/issuer.js | 60 +- ui/app/adapters/pki/key.js | 5 - ui/app/adapters/pki/pki-config.js | 128 + ui/app/adapters/pki/pki-role.js | 70 + ui/app/adapters/pki/role.js | 5 - ui/app/adapters/pki/sign-intermediate.js | 5 - ui/app/adapters/pki/tidy.js | 50 - ui/app/adapters/pki/urls.js | 20 + ui/app/adapters/policy.js | 5 - 
ui/app/adapters/policy/acl.js | 5 - ui/app/adapters/policy/egp.js | 5 - ui/app/adapters/policy/rgp.js | 5 - ui/app/adapters/raft-join.js | 5 - ui/app/adapters/replication-mode.js | 5 - ui/app/adapters/role-aws.js | 5 - ui/app/adapters/role-jwt.js | 5 - ui/app/adapters/role-ssh.js | 5 - ui/app/adapters/secret-engine.js | 7 +- ui/app/adapters/secret-v2-version.js | 5 - ui/app/adapters/secret-v2.js | 9 +- ui/app/adapters/secret.js | 5 - ui/app/adapters/server.js | 5 - ui/app/adapters/ssh-otp-credential.js | 5 - ui/app/adapters/ssh-sign.js | 5 - ui/app/adapters/ssh.js | 5 - ui/app/adapters/tools.js | 5 - ui/app/adapters/transform.js | 5 - ui/app/adapters/transform/alphabet.js | 5 - ui/app/adapters/transform/base.js | 5 - ui/app/adapters/transform/role.js | 5 - ui/app/adapters/transform/template.js | 5 - ui/app/adapters/transform/transformation.js | 5 - ui/app/adapters/transit-key.js | 5 - ui/app/app.js | 8 +- ui/app/breakpoints.js | 5 - ui/app/components/alert-popup.js | 28 + ui/app/components/alphabet-edit.js | 5 - ui/app/components/auth-config-form/config.js | 5 - ui/app/components/auth-config-form/options.js | 5 - ui/app/components/auth-form-options.js | 5 - ui/app/components/auth-form.js | 5 - .../{sidebar/user-menu.js => auth-info.js} | 16 +- ui/app/components/auth-jwt.js | 7 +- ui/app/components/b64-toggle.js | 5 - ui/app/components/basic-dropdown/trigger.js | 5 - ui/app/components/calendar-widget.js | 8 +- ui/app/components/clients/attribution.js | 5 - ui/app/components/clients/config.js | 33 +- ui/app/components/clients/dashboard.js | 5 - .../clients/horizontal-bar-chart.js | 5 - ui/app/components/clients/line-chart.js | 5 - ui/app/components/clients/monthly-usage.js | 5 - ui/app/components/clients/running-total.js | 5 - .../components/clients/vertical-bar-chart.js | 5 - ui/app/components/cluster-info.js | 27 + ui/app/components/configure-aws-secret.js | 5 - ui/app/components/configure-ssh-secret.js | 5 - ui/app/components/console/command-input.js | 5 - ui/app/components/console/log-help.js | 3 + ui/app/components/console/log-json.js | 5 - ui/app/components/console/log-list.js | 5 - ui/app/components/console/log-object.js | 5 - ui/app/components/console/log-text.js | 5 - ui/app/components/console/output-log.js | 5 - ui/app/components/console/ui-panel.js | 33 +- ui/app/components/control-group-success.js | 5 - ui/app/components/control-group.js | 5 - ui/app/components/database-connection.js | 5 - ui/app/components/database-role-edit.js | 5 - .../components/database-role-setting-form.js | 5 - ui/app/components/date-dropdown.js | 8 +- ui/app/components/diff-version-selector.js | 5 - ui/app/components/file-to-array-buffer.js | 5 - ui/app/components/flash-message.js | 5 - .../generate-credentials-database.js | 5 - ui/app/components/generate-credentials.js | 5 - ui/app/components/generated-item-list.js | 5 - ui/app/components/generated-item.js | 15 +- ui/app/components/get-credentials-card.js | 5 - ui/app/components/home-link.js | 28 + ui/app/components/hover-copy-button.js | 5 - ui/app/components/identity/_popup-base.js | 5 - ui/app/components/identity/edit-form.js | 5 - ui/app/components/identity/entity-nav.js | 5 - ui/app/components/identity/item-details.js | 5 - ui/app/components/identity/lookup-input.js | 5 - ui/app/components/identity/popup-alias.js | 5 - ui/app/components/identity/popup-members.js | 5 - ui/app/components/identity/popup-metadata.js | 5 - ui/app/components/identity/popup-policy.js | 5 - ui/app/components/key-version-select.js | 5 - 
ui/app/components/keymgmt/distribute.js | 5 - ui/app/components/keymgmt/key-edit.js | 5 - ui/app/components/keymgmt/provider-edit.js | 5 - ui/app/components/license-banners.js | 10 +- ui/app/components/license-info.js | 5 - ui/app/components/link-status.js | 5 - ui/app/components/logo-edition.js | 5 - ui/app/components/menu-sidebar.js | 15 + ui/app/components/mfa/method-form.js | 5 - ui/app/components/mfa/mfa-form.js | 5 - .../mfa/mfa-login-enforcement-form.js | 5 - .../mfa/mfa-login-enforcement-header.js | 5 - ui/app/components/mfa/mfa-setup-step-one.js | 5 - ui/app/components/mfa/mfa-setup-step-two.js | 5 - .../modal-form/oidc-assignment-template.js | 5 - .../components/modal-form/policy-template.hbs | 2 +- .../components/modal-form/policy-template.js | 5 - ui/app/components/mount-accessor-select.js | 5 - ui/app/components/mount-backend-form.js | 29 - ui/app/components/mount-backend/type-form.js | 5 - ui/app/components/namespace-link.js | 5 - ui/app/components/namespace-picker.js | 9 +- ui/app/components/nav-header.js | 29 + ui/app/components/nav-header/home.js | 5 + ui/app/components/nav-header/items.js | 5 + ui/app/components/nav-header/main.js | 5 + ui/app/components/not-found.js | 5 - ui/app/components/oidc-consent-block.js | 5 - ui/app/components/oidc/assignment-form.js | 5 - ui/app/components/oidc/client-form.js | 5 - ui/app/components/oidc/key-form.js | 5 - ui/app/components/oidc/provider-form.js | 5 - ui/app/components/oidc/scope-form.js | 5 - ui/app/components/outer-html.js | 5 - ui/app/components/pagination-controls.js | 5 - ui/app/components/pgp-file.js | 5 - ui/app/components/pgp-list.js | 5 - ui/app/components/pki/config-pki-ca.js | 185 + ui/app/components/pki/config-pki.js | 73 + ui/app/components/pki/pki-cert-popup.js | 24 + ui/app/components/pki/pki-cert-show.js | 9 + ui/app/components/pki/role-pki-edit.js | 8 + ui/app/components/policy-form.js | 5 - ui/app/components/radial-progress.js | 5 - ui/app/components/raft-join.js | 5 - ui/app/components/raft-storage-overview.js | 5 - ui/app/components/raft-storage-restore.js | 5 - ui/app/components/regex-validator.js | 5 - ui/app/components/role-aws-edit.js | 5 - ui/app/components/role-edit.js | 5 - ui/app/components/role-ssh-edit.js | 5 - ui/app/components/secret-create-or-update.js | 23 +- ui/app/components/secret-delete-menu.js | 5 - ui/app/components/secret-edit-metadata.js | 5 - ui/app/components/secret-edit-toolbar.js | 5 - ui/app/components/secret-edit.js | 5 - ui/app/components/secret-link.js | 5 - .../secret-list/database-list-item.js | 5 - .../secret-list/transform-list-item.js | 5 - ui/app/components/secret-version-menu.js | 5 - ui/app/components/section-tabs.js | 5 - ui/app/components/selectable-card.js | 5 - ui/app/components/shamir-progress.js | 5 - ui/app/components/sidebar/frame.hbs | 51 - ui/app/components/sidebar/frame.js | 9 - ui/app/components/sidebar/nav/access.hbs | 65 - ui/app/components/sidebar/nav/cluster.hbs | 98 - ui/app/components/sidebar/nav/cluster.js | 12 - ui/app/components/sidebar/nav/policies.hbs | 39 - ui/app/components/sidebar/nav/tools.hbs | 22 - ui/app/components/sidebar/user-menu.hbs | 83 - ui/app/components/splash-page.js | 9 +- .../components/splash-page/splash-content.js | 5 - .../components/splash-page/splash-footer.js | 5 - .../components/splash-page/splash-header.js | 5 - ui/app/components/status-menu.js | 44 + ui/app/components/token-expire-warning.js | 5 - ui/app/components/tool-actions-form.js | 5 - ui/app/components/tool-hash.js | 5 - ui/app/components/tool-lookup.js | 5 - 
ui/app/components/tool-random.js | 5 - ui/app/components/tool-rewrap.js | 5 - ui/app/components/tool-unwrap.js | 5 - ui/app/components/tool-wrap.js | 5 - ui/app/components/toolbar-secret-link.js | 5 - .../transform-advanced-templating.js | 5 - ui/app/components/transform-create-form.js | 5 - ui/app/components/transform-edit-base.js | 5 - ui/app/components/transform-edit-form.js | 5 - ui/app/components/transform-role-edit.js | 10 +- .../transform-show-transformation.js | 5 - ui/app/components/transform-template-edit.js | 5 - ui/app/components/transformation-edit.js | 10 +- ui/app/components/transit-edit.js | 5 - .../components/transit-key-action/export.js | 5 - ui/app/components/transit-key-actions.js | 5 - ui/app/components/ui-wizard.js | 5 - ui/app/components/wizard-content.js | 5 - .../components/wizard/features-selection.js | 5 - ui/app/components/wizard/mounts-wizard.js | 5 - ui/app/components/wizard/secrets-keymgmt.js | 5 - ui/app/components/wrap-ttl.js | 5 - ui/app/config/environment.d.ts | 5 - ui/app/controllers/application.js | 14 +- ui/app/controllers/vault.js | 14 +- ui/app/controllers/vault/cluster.js | 37 +- .../access/control-groups-configure.js | 5 - .../cluster/access/identity/aliases/add.js | 5 - .../cluster/access/identity/aliases/edit.js | 5 - .../cluster/access/identity/aliases/index.js | 5 - .../vault/cluster/access/identity/create.js | 5 - .../vault/cluster/access/identity/edit.js | 5 - .../vault/cluster/access/identity/index.js | 5 - .../vault/cluster/access/identity/merge.js | 5 - .../vault/cluster/access/leases/index.js | 5 - .../vault/cluster/access/leases/list-root.js | 5 - .../vault/cluster/access/leases/list.js | 5 - .../vault/cluster/access/leases/show.js | 5 - .../vault/cluster/access/method/item/list.js | 5 - .../vault/cluster/access/methods.js | 83 +- .../mfa/enforcements/enforcement/index.js | 5 - .../cluster/access/mfa/enforcements/index.js | 5 - .../vault/cluster/access/mfa/methods.js | 5 - .../cluster/access/mfa/methods/create.js | 5 - .../access/mfa/methods/method/index.js | 5 - .../vault/cluster/access/namespaces/create.js | 5 - .../vault/cluster/access/namespaces/index.js | 5 - .../controllers/vault/cluster/access/oidc.js | 5 - .../oidc/assignments/assignment/details.js | 5 - .../cluster/access/oidc/clients/client.js | 5 - .../access/oidc/clients/client/details.js | 5 - .../vault/cluster/access/oidc/keys/key.js | 5 - .../cluster/access/oidc/keys/key/details.js | 5 - .../cluster/access/oidc/providers/provider.js | 5 - .../access/oidc/providers/provider/details.js | 5 - .../access/oidc/scopes/scope/details.js | 5 - ui/app/controllers/vault/cluster/auth.js | 5 - ui/app/controllers/vault/cluster/clients.js | 5 - ui/app/controllers/vault/cluster/init.js | 5 - ui/app/controllers/vault/cluster/mfa-setup.js | 5 - .../vault/cluster/oidc-callback.js | 5 - .../vault/cluster/oidc-provider-ns.js | 5 - .../vault/cluster/oidc-provider.js | 5 - .../vault/cluster/policies/index.js | 5 - .../controllers/vault/cluster/policy/edit.js | 5 - .../cluster/replication-dr-promote/index.js | 5 - .../vault/cluster/secrets/backend.js | 5 - .../cluster/secrets/backend/actions-root.js | 5 - .../vault/cluster/secrets/backend/actions.js | 5 - .../cluster/secrets/backend/configuration.js | 7 +- .../cluster/secrets/backend/create-root.js | 5 - .../vault/cluster/secrets/backend/create.js | 5 - .../secrets/backend/credentials-root.js | 5 - .../cluster/secrets/backend/credentials.js | 5 - .../vault/cluster/secrets/backend/diff.js | 5 - .../cluster/secrets/backend/edit-root.js | 5 - 
.../vault/cluster/secrets/backend/edit.js | 5 - .../cluster/secrets/backend/list-root.js | 5 - .../vault/cluster/secrets/backend/list.js | 5 - .../vault/cluster/secrets/backend/metadata.js | 5 - .../cluster/secrets/backend/show-root.js | 5 - .../vault/cluster/secrets/backend/show.js | 5 - .../cluster/secrets/backend/sign-root.js | 5 - .../vault/cluster/secrets/backend/sign.js | 5 - .../vault/cluster/secrets/backends.js | 99 +- ui/app/controllers/vault/cluster/settings.js | 5 - .../vault/cluster/settings/auth/enable.js | 5 - .../settings/configure-secret-backend.js | 5 - .../cluster/settings/mount-secret-backend.js | 5 - .../vault/cluster/settings/seal.js | 5 - ui/app/controllers/vault/cluster/unseal.js | 5 - .../decorators/model-expanded-attributes.js | 92 - ui/app/decorators/model-form-fields.js | 5 - ui/app/decorators/model-validations.js | 78 +- ui/app/helpers/-date-base.js | 5 - ui/app/helpers/add-to-array.js | 5 - ui/app/helpers/add.js | 5 - ui/app/helpers/all-features.js | 5 - ui/app/helpers/await.js | 5 - ui/app/helpers/aws-regions.js | 5 - ui/app/helpers/coerce-eq.js | 5 - ui/app/helpers/date-from-now.js | 5 - ui/app/helpers/filter-wildcard.js | 5 - ui/app/helpers/has-permission.js | 9 +- ui/app/helpers/is-after.js | 5 - ui/app/helpers/is-before.js | 5 - ui/app/helpers/is-empty-value.js | 5 - ui/app/helpers/is-wildcard-string.js | 5 - ui/app/helpers/jsonify.js | 5 - ui/app/helpers/mountable-auth-methods.js | 5 - ui/app/helpers/mountable-secret-engines.js | 6 - ui/app/helpers/multi-line-join.js | 5 - ui/app/helpers/nav-to-route.js | 5 - ui/app/helpers/now.js | 5 - ui/app/helpers/number-to-word.js | 5 - ui/app/helpers/remove-from-array.js | 5 - ui/app/helpers/route-params-for.js | 5 - ui/app/helpers/secret-query-params.js | 5 - ui/app/helpers/sha2-digest-sizes.js | 5 - ui/app/helpers/split-object.js | 5 - ui/app/helpers/stringify.js | 5 - ui/app/helpers/sub.js | 5 - ui/app/helpers/supported-auth-backends.js | 5 - .../supported-managed-auth-backends.js | 5 - ui/app/helpers/supported-secret-backends.js | 5 - ui/app/helpers/tabs-for-auth-section.js | 5 - ui/app/helpers/tabs-for-identity-show.js | 5 - ui/app/helpers/to-label.js | 5 - ui/app/helpers/tools-actions.js | 5 - ui/app/helpers/wizard-constants.js | 5 - ui/app/index.html | 5 - ui/app/initializers/deprecation-filter.js | 5 - .../initializers/disable-ember-inspector.js | 5 - ui/app/initializers/ember-data-identifiers.js | 5 - ui/app/initializers/enable-engines.js | 5 - .../instance-initializers/track-csp-event.js | 5 - ui/app/lib/arg-tokenizer.js | 5 - ui/app/lib/attach-capabilities.js | 5 - ...{console-helpers.ts => console-helpers.js} | 169 +- ui/app/lib/control-group-error.js | 5 - ui/app/lib/key-utils.js | 5 - ui/app/lib/keycodes.js | 5 - ui/app/lib/kv-object.js | 5 - ui/app/lib/local-storage.js | 5 - ui/app/lib/memory-storage.js | 5 - ui/app/lib/path-to-tree.js | 5 - ui/app/lib/route-paths.js | 5 - ui/app/lib/token-storage.js | 5 - ui/app/machines/auth-machine.js | 5 - ui/app/machines/policies-machine.js | 5 - ui/app/machines/replication-machine.js | 5 - ui/app/machines/secrets-machine.js | 5 - ui/app/machines/tools-machine.js | 5 - ui/app/machines/tutorial-machine.js | 5 - ui/app/macros/identity-capabilities.js | 5 - ui/app/macros/lazy-capabilities.js | 5 - ui/app/macros/maybe-query-record.js | 5 - ui/app/mixins/backend-crumb.js | 5 - ui/app/mixins/cluster-route.js | 5 - ui/app/mixins/focus-on-insert.js | 5 - ui/app/mixins/key-mixin.js | 5 - ui/app/mixins/model-boundary-route.js | 5 - ui/app/mixins/unload-model-route.js | 5 - 
ui/app/mixins/unsaved-model-route.js | 5 - ui/app/mixins/with-nav-to-nearest-ancestor.js | 5 - ui/app/models/auth-config.js | 5 - ui/app/models/auth-config/approle.js | 5 - ui/app/models/auth-config/aws/client.js | 5 - .../auth-config/aws/identity-accesslist.js | 5 - .../auth-config/aws/roletag-denylist.js | 5 - ui/app/models/auth-config/aws/tidy.js | 5 - ui/app/models/auth-config/azure.js | 5 - ui/app/models/auth-config/cert.js | 5 - ui/app/models/auth-config/gcp.js | 5 - ui/app/models/auth-config/github.js | 5 - ui/app/models/auth-config/jwt.js | 5 - ui/app/models/auth-config/kubernetes.js | 5 - ui/app/models/auth-config/ldap.js | 5 - ui/app/models/auth-config/oidc.js | 5 - ui/app/models/auth-config/okta.js | 5 - ui/app/models/auth-config/radius.js | 5 - ui/app/models/auth-config/userpass.js | 5 - ui/app/models/auth-method.js | 15 +- ui/app/models/aws-credential.js | 5 - ui/app/models/capabilities.js | 5 - ui/app/models/clients/activity.js | 5 - ui/app/models/clients/config.js | 72 +- ui/app/models/clients/version-history.js | 5 - ui/app/models/cluster.js | 5 - ui/app/models/control-group-config.js | 5 - ui/app/models/control-group.js | 5 - ui/app/models/database/connection.js | 5 - ui/app/models/database/credential.js | 5 - ui/app/models/database/role.js | 5 - ui/app/models/identity/_base.js | 5 - ui/app/models/identity/entity-alias.js | 5 - ui/app/models/identity/entity-merge.js | 5 - ui/app/models/identity/entity.js | 5 - ui/app/models/identity/group-alias.js | 5 - ui/app/models/identity/group.js | 5 - ui/app/models/keymgmt/key.js | 5 - ui/app/models/keymgmt/provider.js | 5 - ui/app/models/kmip/ca.js | 5 - ui/app/models/kmip/config.js | 5 - ui/app/models/kmip/credential.js | 5 - ui/app/models/kmip/role.js | 7 +- ui/app/models/kmip/scope.js | 5 - ui/app/models/kubernetes/config.js | 5 - ui/app/models/kubernetes/role.js | 5 - ui/app/models/lease.js | 5 - ui/app/models/license.js | 5 - ui/app/models/mfa-login-enforcement.js | 5 - ui/app/models/mfa-method.js | 5 - ui/app/models/mount-config.js | 10 - ui/app/models/namespace.js | 33 +- ui/app/models/node.js | 5 - ui/app/models/oidc/assignment.js | 5 - ui/app/models/oidc/client.js | 5 - ui/app/models/oidc/key.js | 5 - ui/app/models/oidc/provider.js | 5 - ui/app/models/oidc/scope.js | 5 - ui/app/models/path-filter-config.js | 5 - ui/app/models/pki-ca-certificate-sign.js | 75 + ui/app/models/pki-ca-certificate.js | 152 + ui/app/models/pki-certificate-sign.js | 34 + ui/app/models/pki/about-pki-naming.md | 30 + ui/app/models/pki/action.js | 64 +- ui/app/models/pki/cert.js | 134 + ui/app/models/pki/certificate/base.js | 95 +- ui/app/models/pki/certificate/generate.js | 15 +- ui/app/models/pki/certificate/sign.js | 18 +- ui/app/models/pki/config/acme.js | 68 - ui/app/models/pki/config/cluster.js | 40 - ui/app/models/pki/config/crl.js | 98 - ui/app/models/pki/issuer.js | 116 +- ui/app/models/pki/key.js | 14 - ui/app/models/pki/pki-config.js | 56 + ui/app/models/pki/pki-role.js | 87 + ui/app/models/pki/role.js | 13 +- ui/app/models/pki/sign-intermediate.js | 25 +- ui/app/models/pki/tidy.js | 171 - ui/app/models/pki/{config => }/urls.js | 11 +- ui/app/models/policy.js | 5 - ui/app/models/policy/acl.js | 5 - ui/app/models/policy/egp.js | 5 - ui/app/models/policy/rgp.js | 5 - ui/app/models/raft-join.js | 5 - ui/app/models/replication-attributes.js | 5 - ui/app/models/replication-mode.js | 5 - ui/app/models/role-aws.js | 5 - ui/app/models/role-jwt.js | 5 - ui/app/models/role-ssh.js | 5 - ui/app/models/secret-engine.js | 284 +- 
ui/app/models/secret-v2-version.js | 8 +- ui/app/models/secret-v2.js | 5 - ui/app/models/secret.js | 5 - ui/app/models/server.js | 5 - ui/app/models/ssh-otp-credential.js | 5 - ui/app/models/ssh-sign.js | 5 - ui/app/models/test-form-model.js | 5 - ui/app/models/transform.js | 5 - ui/app/models/transform/alphabet.js | 5 - ui/app/models/transform/role.js | 5 - ui/app/models/transform/template.js | 5 - ui/app/models/transit-key.js | 5 - ui/app/router.js | 5 - ui/app/routes/application.js | 5 - ui/app/routes/loading.js | 5 - ui/app/routes/vault.js | 5 - ui/app/routes/vault/cluster.js | 18 +- ui/app/routes/vault/cluster/access.js | 5 - .../cluster/access/control-group-accessor.js | 9 +- .../access/control-groups-configure.js | 7 +- .../vault/cluster/access/control-groups.js | 7 +- .../routes/vault/cluster/access/identity.js | 5 - .../cluster/access/identity/aliases/add.js | 5 - .../cluster/access/identity/aliases/edit.js | 5 - .../cluster/access/identity/aliases/index.js | 5 - .../cluster/access/identity/aliases/show.js | 5 - .../vault/cluster/access/identity/create.js | 5 - .../vault/cluster/access/identity/edit.js | 5 - .../vault/cluster/access/identity/index.js | 5 - .../vault/cluster/access/identity/merge.js | 5 - .../vault/cluster/access/identity/show.js | 5 - ui/app/routes/vault/cluster/access/leases.js | 5 - .../vault/cluster/access/leases/index.js | 5 - .../vault/cluster/access/leases/list-root.js | 5 - .../vault/cluster/access/leases/list.js | 5 - .../vault/cluster/access/leases/show.js | 5 - ui/app/routes/vault/cluster/access/method.js | 5 - .../vault/cluster/access/method/index.js | 5 - .../vault/cluster/access/method/item.js | 5 - .../cluster/access/method/item/create.js | 5 - .../vault/cluster/access/method/item/edit.js | 5 - .../vault/cluster/access/method/item/list.js | 5 - .../vault/cluster/access/method/item/show.js | 5 - .../vault/cluster/access/method/section.js | 5 - ui/app/routes/vault/cluster/access/methods.js | 17 +- .../cluster/access/mfa/enforcements/create.js | 5 - .../access/mfa/enforcements/enforcement.js | 5 - .../mfa/enforcements/enforcement/edit.js | 5 - .../cluster/access/mfa/enforcements/index.js | 5 - .../routes/vault/cluster/access/mfa/index.js | 5 - .../cluster/access/mfa/methods/create.js | 5 - .../vault/cluster/access/mfa/methods/index.js | 5 - .../cluster/access/mfa/methods/method.js | 5 - .../cluster/access/mfa/methods/method/edit.js | 5 - .../vault/cluster/access/namespaces/create.js | 5 - .../vault/cluster/access/namespaces/index.js | 5 - ui/app/routes/vault/cluster/access/oidc.js | 5 - .../access/oidc/assignments/assignment.js | 5 - .../oidc/assignments/assignment/details.js | 5 - .../oidc/assignments/assignment/edit.js | 5 - .../cluster/access/oidc/assignments/create.js | 5 - .../cluster/access/oidc/assignments/index.js | 5 - .../cluster/access/oidc/clients/client.js | 5 - .../access/oidc/clients/client/details.js | 5 - .../access/oidc/clients/client/edit.js | 5 - .../access/oidc/clients/client/providers.js | 5 - .../cluster/access/oidc/clients/create.js | 5 - .../cluster/access/oidc/clients/index.js | 5 - .../routes/vault/cluster/access/oidc/index.js | 5 - .../vault/cluster/access/oidc/keys/create.js | 5 - .../vault/cluster/access/oidc/keys/index.js | 5 - .../vault/cluster/access/oidc/keys/key.js | 5 - .../cluster/access/oidc/keys/key/clients.js | 5 - .../cluster/access/oidc/keys/key/details.js | 5 - .../cluster/access/oidc/keys/key/edit.js | 5 - .../cluster/access/oidc/providers/create.js | 5 - .../cluster/access/oidc/providers/index.js | 5 - 
.../cluster/access/oidc/providers/provider.js | 5 - .../access/oidc/providers/provider/clients.js | 5 - .../access/oidc/providers/provider/details.js | 5 - .../access/oidc/providers/provider/edit.js | 5 - .../cluster/access/oidc/scopes/create.js | 5 - .../vault/cluster/access/oidc/scopes/index.js | 5 - .../vault/cluster/access/oidc/scopes/scope.js | 5 - .../access/oidc/scopes/scope/details.js | 5 - .../cluster/access/oidc/scopes/scope/edit.js | 5 - ui/app/routes/vault/cluster/auth.js | 10 +- ui/app/routes/vault/cluster/clients.js | 15 +- ui/app/routes/vault/cluster/clients/config.js | 5 - .../routes/vault/cluster/clients/dashboard.js | 8 +- ui/app/routes/vault/cluster/clients/edit.js | 5 - ui/app/routes/vault/cluster/clients/index.js | 10 - .../vault/cluster/cluster-route-base.js | 5 - ui/app/routes/vault/cluster/index.js | 5 - ui/app/routes/vault/cluster/init.js | 5 - ui/app/routes/vault/cluster/license.js | 5 - ui/app/routes/vault/cluster/logout.js | 5 - ui/app/routes/vault/cluster/mfa-setup.js | 5 - ui/app/routes/vault/cluster/oidc-callback.js | 53 +- .../routes/vault/cluster/oidc-provider-ns.js | 5 - ui/app/routes/vault/cluster/oidc-provider.js | 5 - ui/app/routes/vault/cluster/policies.js | 5 - .../routes/vault/cluster/policies/create.js | 5 - ui/app/routes/vault/cluster/policies/index.js | 5 - ui/app/routes/vault/cluster/policy.js | 5 - ui/app/routes/vault/cluster/policy/edit.js | 5 - ui/app/routes/vault/cluster/policy/index.js | 5 - ui/app/routes/vault/cluster/policy/show.js | 5 - ui/app/routes/vault/cluster/redirect.js | 5 - .../cluster/replication-dr-promote/details.js | 5 - .../cluster/replication-dr-promote/index.js | 5 - ui/app/routes/vault/cluster/secrets.js | 5 - .../routes/vault/cluster/secrets/backend.js | 5 - .../vault/cluster/secrets/backend/actions.js | 5 - .../cluster/secrets/backend/configuration.js | 5 - .../cluster/secrets/backend/create-root.js | 7 +- .../vault/cluster/secrets/backend/create.js | 5 - .../secrets/backend/credentials-root.js | 5 - .../cluster/secrets/backend/credentials.js | 5 - .../vault/cluster/secrets/backend/diff.js | 5 - .../cluster/secrets/backend/edit-metadata.js | 5 - .../cluster/secrets/backend/edit-root.js | 5 - .../vault/cluster/secrets/backend/edit.js | 5 - .../vault/cluster/secrets/backend/index.js | 5 - .../cluster/secrets/backend/list-root.js | 5 - .../vault/cluster/secrets/backend/list.js | 15 +- .../vault/cluster/secrets/backend/metadata.js | 5 - .../vault/cluster/secrets/backend/overview.js | 5 - .../cluster/secrets/backend/secret-edit.js | 9 +- .../cluster/secrets/backend/show-root.js | 5 - .../vault/cluster/secrets/backend/show.js | 5 - .../cluster/secrets/backend/sign-root.js | 5 - .../vault/cluster/secrets/backend/sign.js | 5 - .../cluster/secrets/backend/versions-root.js | 5 - .../vault/cluster/secrets/backend/versions.js | 5 - .../routes/vault/cluster/secrets/backends.js | 5 - ui/app/routes/vault/cluster/settings.js | 5 - .../vault/cluster/settings/auth/configure.js | 5 - .../cluster/settings/auth/configure/index.js | 5 - .../settings/auth/configure/section.js | 5 - .../vault/cluster/settings/auth/enable.js | 5 - .../vault/cluster/settings/auth/index.js | 5 - .../settings/configure-secret-backend.js | 17 +- .../configure-secret-backend/index.js | 10 + .../configure-secret-backend/section.js | 54 + ui/app/routes/vault/cluster/settings/index.js | 5 - .../cluster/settings/mount-secret-backend.js | 5 - ui/app/routes/vault/cluster/settings/seal.js | 5 - ui/app/routes/vault/cluster/storage.js | 5 - 
ui/app/routes/vault/cluster/tools.js | 5 - ui/app/routes/vault/cluster/tools/index.js | 5 - ui/app/routes/vault/cluster/tools/tool.js | 5 - ui/app/routes/vault/cluster/unseal.js | 5 - ui/app/serializers/application.js | 5 - ui/app/serializers/auth-method.js | 5 - ui/app/serializers/capabilities.js | 5 - ui/app/serializers/clients/activity.js | 10 +- ui/app/serializers/clients/config.js | 5 - ui/app/serializers/clients/version-history.js | 5 - ui/app/serializers/cluster.js | 5 - ui/app/serializers/config.js | 5 - ui/app/serializers/control-group.js | 5 - ui/app/serializers/database/connection.js | 5 - ui/app/serializers/database/credential.js | 5 - ui/app/serializers/database/role.js | 5 - ui/app/serializers/identity/_base.js | 5 - ui/app/serializers/identity/entity-alias.js | 5 - ui/app/serializers/identity/entity.js | 5 - ui/app/serializers/identity/group-alias.js | 5 - ui/app/serializers/identity/group.js | 5 - ui/app/serializers/keymgmt/key.js | 5 - ui/app/serializers/keymgmt/provider.js | 5 - ui/app/serializers/kubernetes/config.js | 5 - ui/app/serializers/kubernetes/role.js | 5 - ui/app/serializers/lease.js | 5 - ui/app/serializers/license.js | 5 - ui/app/serializers/mfa-login-enforcement.js | 5 - ui/app/serializers/mfa-method.js | 5 - ui/app/serializers/mount-config.js | 5 - ui/app/serializers/namespace.js | 5 - ui/app/serializers/node.js | 5 - ui/app/serializers/oidc/assignment.js | 5 - ui/app/serializers/oidc/client.js | 5 - ui/app/serializers/oidc/key.js | 5 - ui/app/serializers/oidc/provider.js | 5 - ui/app/serializers/oidc/scope.js | 5 - ui/app/serializers/path-filter-config.js | 5 - ui/app/serializers/pki/action.js | 34 - ui/app/serializers/pki/cert.js | 72 + ui/app/serializers/pki/certificate.js | 5 - ui/app/serializers/pki/certificate/base.js | 10 +- .../serializers/pki/certificate/generate.js | 5 - ui/app/serializers/pki/certificate/sign.js | 5 - ui/app/serializers/pki/issuer.js | 54 +- ui/app/serializers/pki/key.js | 5 - ui/app/serializers/pki/pki-config.js | 29 + ui/app/serializers/pki/pki-role.js | 3 + ui/app/serializers/pki/role.js | 14 - ui/app/serializers/pki/tidy.js | 17 - ui/app/serializers/policy.js | 5 - ui/app/serializers/policy/acl.js | 5 - ui/app/serializers/policy/egp.js | 5 - ui/app/serializers/policy/rgp.js | 5 - ui/app/serializers/replication-attributes.js | 5 - ui/app/serializers/replication-mode.js | 5 - ui/app/serializers/role-aws.js | 5 - ui/app/serializers/role-ssh.js | 5 - ui/app/serializers/role.js | 5 - ui/app/serializers/secret-engine.js | 5 - ui/app/serializers/secret-v2-version.js | 5 - ui/app/serializers/secret-v2.js | 5 - ui/app/serializers/secret.js | 5 - ui/app/serializers/server.js | 5 - ui/app/serializers/ssh.js | 5 - ui/app/serializers/transform.js | 5 - ui/app/serializers/transform/alphabet.js | 5 - ui/app/serializers/transform/role.js | 5 - ui/app/serializers/transform/template.js | 5 - ui/app/serializers/transit-key.js | 5 - ui/app/services/auth.js | 154 +- ui/app/services/console.js | 23 +- ui/app/services/control-group.js | 5 - ui/app/services/csp-event.js | 5 - ui/app/services/current-cluster.js | 5 - ui/app/services/download.ts | 8 +- ui/app/services/feature-flag.js | 5 - ui/app/services/flash-messages.ts | 19 +- ui/app/services/namespace.js | 5 - ui/app/services/path-help.js | 5 - ui/app/services/permissions.js | 14 +- ui/app/services/replication-mode.js | 5 - ui/app/services/secret-mount-path.js | 5 - ui/app/services/store.js | 5 - ui/app/services/version.js | 101 +- ui/app/services/wizard.js | 5 - ui/app/styles/app.scss | 18 +- 
ui/app/styles/components/action-block.scss | 9 +- ui/app/styles/components/auth-buttons.scss | 5 - ui/app/styles/components/auth-form.scss | 5 - .../styles/components/autocomplete-input.scss | 23 - ui/app/styles/components/b64-toggle.scss | 5 - ui/app/styles/components/box-label.scss | 9 +- ui/app/styles/components/box-radio.scss | 5 - ui/app/styles/components/calendar-widget.scss | 39 +- ui/app/styles/components/codemirror.scss | 9 +- ui/app/styles/components/confirm.scss | 21 +- .../styles/components/console-ui-panel.scss | 85 +- ui/app/styles/components/control-group.scss | 9 +- .../components/diff-version-selector.scss | 5 - ui/app/styles/components/doc-link.scss | 7 +- ...-state-component.scss => empty-state.scss} | 5 - ui/app/styles/components/env-banner.scss | 14 +- .../styles/components/features-selection.scss | 5 - ui/app/styles/components/form-section.scss | 9 +- ui/app/styles/components/global-flash.scss | 5 - .../styles/components/hover-copy-button.scss | 5 - ui/app/styles/components/icon.scss | 43 +- ui/app/styles/components/info-table-row.scss | 22 +- ui/app/styles/components/info-table.scss | 9 + .../styles/components/init-illustration.scss | 5 - ui/app/styles/components/input-hint.scss | 5 + ui/app/styles/components/kmip-role-edit.scss | 10 +- .../components/known-secondaries-card.scss | 5 - ui/app/styles/components/license-banners.scss | 5 - ui/app/styles/components/linkable-item.scss | 12 + ui/app/styles/components/linked-block.scss | 18 - ui/app/styles/components/list-item-row.scss | 13 +- ui/app/styles/components/list-pagination.scss | 128 +- ui/app/styles/components/loader.scss | 5 - ui/app/styles/components/login-form.scss | 5 - ui/app/styles/components/masked-input.scss | 17 +- .../{modal-component.scss => modal.scss} | 73 +- .../styles/components/namespace-picker.scss | 74 +- .../styles/components/namespace-reminder.scss | 5 - ui/app/styles/components/navigate-input.scss | 5 - ui/app/styles/components/page-header.scss | 11 +- ui/app/styles/components/popup-menu.scss | 31 +- ui/app/styles/components/radial-progress.scss | 5 - ui/app/styles/components/radio-card.scss | 5 - ui/app/styles/components/raft-join.scss | 5 - ui/app/styles/components/read-more.scss | 12 +- ui/app/styles/components/regex-validator.scss | 5 - .../components/replication-dashboard.scss | 7 +- .../styles/components/replication-header.scss | 5 + .../components/replication-mode-summary.scss | 5 - .../styles/components/replication-page.scss | 5 - .../components/replication-primary-card.scss | 5 - .../components/replication-summary.scss | 5 - ui/app/styles/components/role-item.scss | 5 - ui/app/styles/components/search-select.scss | 9 +- .../components/selectable-card-container.scss | 9 - ui/app/styles/components/selectable-card.scss | 9 +- .../styles/components/shamir-modal-flow.scss | 5 - ui/app/styles/components/shamir-progress.scss | 7 +- ui/app/styles/components/sidebar.scss | 120 +- ui/app/styles/components/splash-page.scss | 7 +- ui/app/styles/components/stat-text.scss | 5 - ui/app/styles/components/status-menu.scss | 19 + ui/app/styles/components/tabs-component.scss | 101 - ui/app/styles/components/tabs.scss | 63 + ui/app/styles/components/text-file.scss | 5 - .../components/token-expire-warning.scss | 5 - ui/app/styles/components/tool-tip.scss | 5 - ui/app/styles/components/toolbar.scss | 5 - ui/app/styles/components/transform-edit.scss | 5 - ui/app/styles/components/transit-card.scss | 5 - ui/app/styles/components/ttl-picker.scss | 5 - ui/app/styles/components/ui-wizard.scss | 24 +- 
ui/app/styles/components/unseal-warning.scss | 5 - ui/app/styles/components/vault-loading.scss | 5 - ui/app/styles/components/vlt-radio.scss | 5 - ui/app/styles/components/vlt-table.scss | 12 - ui/app/styles/core.scss | 84 +- ui/app/styles/core/alert-banner.scss | 5 - ui/app/styles/core/box.scss | 69 +- ui/app/styles/core/breadcrumb.scss | 31 +- .../styles/core/bulma-radio-checkboxes.scss | 25 + ui/app/styles/core/buttons.scss | 292 +- ui/app/styles/core/charts.scss | 23 +- ui/app/styles/core/checkbox-and-radio.scss | 124 - ui/app/styles/core/columns.scss | 201 - ui/app/styles/core/control.scss | 64 - ui/app/styles/core/element-styling.scss | 167 - ui/app/styles/core/field.scss | 141 - ui/app/styles/core/file.scss | 99 - ui/app/styles/core/footer.scss | 5 - ui/app/styles/core/forms.scss | 364 + ui/app/styles/core/generic.scss | 57 + ui/app/styles/core/helpers.scss | 345 + ui/app/styles/core/hero.scss | 7 + ui/app/styles/core/inputs.scss | 83 - ui/app/styles/core/label.scss | 26 - .../core/{containers.scss => layout.scss} | 22 +- ui/app/styles/core/level.scss | 82 - ui/app/styles/core/link.scss | 27 - ui/app/styles/core/lists.scss | 14 +- ui/app/styles/core/menu.scss | 27 +- ui/app/styles/core/message.scss | 95 +- ui/app/styles/core/navbar.scss | 289 + ui/app/styles/core/notification.scss | 57 + ui/app/styles/core/progress.scss | 66 +- ui/app/styles/core/select.scss | 81 - ui/app/styles/core/switch.scss | 153 +- ui/app/styles/core/tables.scss | 39 + ui/app/styles/core/tag.scss | 72 - ui/app/styles/core/tags.scss | 43 + ui/app/styles/core/title.scss | 41 +- ui/app/styles/core/toggle.scss | 16 +- ui/app/styles/engines.scss | 2 + ui/app/styles/helper-classes/colors.scss | 81 - .../helper-classes/flexbox-and-grid.scss | 115 - ui/app/styles/helper-classes/general.scss | 100 - ui/app/styles/helper-classes/layout.scss | 90 - ui/app/styles/helper-classes/spacing.scss | 169 - ui/app/styles/helper-classes/typography.scss | 106 - .../styles/pki/pki-not-valid-after-form.scss | 3 + ui/app/styles/reset.scss | 6 - .../styles/utils/_box-shadow_variables.scss | 24 - ui/app/styles/utils/_bulma_variables.scss | 96 + .../{_color_variables.scss => _colors.scss} | 26 +- ui/app/styles/utils/_font_variables.scss | 16 - ui/app/styles/utils/_size_variables.scss | 49 - ui/app/styles/utils/_spacing.scss | 9 + ui/app/styles/utils/animations.scss | 5 - ui/app/styles/utils/mixins.scss | 21 +- ui/app/templates/application.hbs | 8 +- .../templates}/components/alert-popup.hbs | 12 +- .../components/auth-form-options.hbs | 3 +- ui/app/templates/components/auth-info.hbs | 81 + .../templates/components/calendar-widget.hbs | 6 +- .../templates/components/clients/config.hbs | 109 +- .../components/clients/dashboard.hbs | 2 +- ui/app/templates/components/cluster-info.hbs | 150 + .../components/configure-ssh-secret.hbs | 8 +- .../components/console/command-input.hbs | 2 +- .../components/console/log-command.hbs | 6 +- .../templates/components/console/log-help.hbs | 1 - .../templates/components/console/ui-panel.hbs | 24 +- .../components/file-to-array-buffer.hbs | 2 +- ui/app/templates/components/home-link.hbs | 7 + ui/app/templates/components/link-status.hbs | 2 +- ui/app/templates/components/menu-sidebar.hbs | 21 + .../components/mfa/mfa-setup-step-two.hbs | 8 +- .../templates/components/namespace-picker.hbs | 151 +- ui/app/templates/components/nav-header.hbs | 42 + ui/app/templates/components/not-found.hbs | 5 +- .../components/pki/config-pki-ca.hbs | 264 + .../templates/components/pki/config-pki.hbs | 46 + 
.../components/pki/configure-pki-secret.hbs | 19 + .../components/pki/pki-cert-popup.hbs | 25 + .../components/pki/pki-cert-show.hbs | 94 + .../components/pki/role-pki-edit.hbs | 113 + .../components/secret-create-or-update.hbs | 8 +- .../components/secret-delete-menu.hbs | 2 +- .../components/secret-edit-metadata.hbs | 2 +- .../components/secret-edit-toolbar.hbs | 2 + .../templates/components/secret-form-show.hbs | 34 +- .../components/secret-list/pki-cert-item.hbs | 24 + .../components/secret-list/pki-role-item.hbs | 101 + .../templates/components/selectable-card.hbs | 2 +- ui/app/templates/components/splash-page.hbs | 14 + ui/app/templates/components/status-menu.hbs | 23 + ui/app/templates/components/tool-unwrap.hbs | 2 +- .../components/transform-role-edit.hbs | 22 +- .../components/transit-form-create.hbs | 2 +- .../components/transit-form-edit.hbs | 4 +- .../components/transit-form-show.hbs | 2 +- .../components/transit-key-action/datakey.hbs | 2 +- .../components/transit-key-action/decrypt.hbs | 2 +- .../components/transit-key-action/encrypt.hbs | 2 +- .../components/transit-key-action/hmac.hbs | 2 +- .../components/transit-key-action/rewrap.hbs | 4 +- .../components/transit-key-action/verify.hbs | 2 +- ui/app/templates/vault.hbs | 7 +- ui/app/templates/vault/cluster.hbs | 143 +- ui/app/templates/vault/cluster/access.hbs | 77 +- .../vault/cluster/access/loading.hbs | 1 - .../vault/cluster/access/methods.hbs | 29 +- .../vault/cluster/access/mfa/index.hbs | 2 +- .../cluster/access/mfa/methods/create.hbs | 6 +- .../templates/vault/cluster/access/oidc.hbs | 2 +- ui/app/templates/vault/cluster/auth.hbs | 7 +- ui/app/templates/vault/cluster/clients.hbs | 14 +- .../vault/cluster/clients/config.hbs | 2 +- .../vault/cluster/clients/loading.hbs | 1 - ui/app/templates/vault/cluster/policies.hbs | 53 +- .../vault/cluster/policies/index.hbs | 2 +- ui/app/templates/vault/cluster/policy.hbs | 47 +- .../cluster/secrets/backend/configuration.hbs | 5 +- .../vault/cluster/secrets/backend/error.hbs | 2 +- .../cluster/secrets/backend/metadata.hbs | 4 +- .../vault/cluster/secrets/backends.hbs | 178 +- .../templates/vault/cluster/settings/auth.hbs | 2 - .../settings/configure-secret-backend.hbs | 2 + .../configure-secret-backend/section.hbs | 7 + ui/app/templates/vault/cluster/tools/tool.hbs | 23 +- ui/app/templates/vault/cluster/unseal.hbs | 12 + ui/app/templates/vault/error.hbs | 77 +- ui/app/templates/vault/not-found.hbs | 15 +- ui/app/transforms/array.js | 5 - ui/app/transforms/object.js | 5 - ui/app/utils/api-path.js | 5 - ui/app/utils/camelize-object-keys.js | 13 - ui/app/utils/chart-helpers.js | 5 - ui/app/utils/clamp.js | 5 - ui/app/utils/database-helpers.js | 5 - ui/app/utils/error-message.js | 5 - ui/app/utils/field-to-attrs.js | 5 - ui/app/utils/identity-manager.js | 5 - ui/app/utils/openapi-to-attrs.js | 22 +- ui/app/utils/parse-pki-cert-oids.js | 18 +- ui/app/utils/parse-pki-cert.js | 58 +- ui/app/utils/path-encoding-helpers.js | 30 +- ui/app/utils/remove-record.js | 5 - ui/app/utils/trim-right.js | 5 - ui/app/utils/validators.js | 5 - .../files/__root__/__path__/__name__.js | 20 + .../__templatepath__/__templatename__.hbs | 2 + ui/blueprints/component/index.js | 89 + ui/config/content-security-policy.js | 5 - ui/config/deprecation-workflow.js | 5 - ui/config/environment.js | 5 - ui/config/targets.js | 5 - ui/docs/components.md | 61 - ui/docs/css.md | 39 - ui/docs/ember-engines.md | 175 - ui/docs/forms.md | 19 - ui/docs/model-validations.md | 159 - ui/docs/models.md | 153 - ui/docs/routing.md 
| 92 - ui/docs/serializers-adapters.md | 29 - ui/ember-cli-build.js | 17 +- ui/lib/.eslintrc.js | 5 - ui/lib/core/addon/components/alert-banner.js | 5 - ui/lib/core/addon/components/alert-inline.js | 5 - .../addon/components/autocomplete-input.js | 5 - ui/lib/core/addon/components/box-radio.js | 20 +- ui/lib/core/addon/components/checkbox-grid.ts | 5 - ui/lib/core/addon/components/chevron.js | 34 +- .../core/addon/components/confirm-action.js | 5 - ui/lib/core/addon/components/confirm.js | 5 - .../core/addon/components/confirm/message.js | 5 - .../addon/components/confirmation-modal.hbs | 5 +- .../addon/components/confirmation-modal.js | 5 - ui/lib/core/addon/components/doc-link.js | 5 - .../core/addon/components/download-button.js | 29 +- ui/lib/core/addon/components/edit-form.js | 5 - ui/lib/core/addon/components/empty-state.js | 28 + ui/lib/core/addon/components/external-link.js | 5 - .../core/addon/components/field-group-show.js | 20 + ui/lib/core/addon/components/form-error.js | 18 + .../components/form-field-groups-loop.js | 5 - .../addon/components/form-field-groups.hbs | 4 - .../addon/components/form-field-groups.js | 5 - .../core/addon/components/form-field-label.js | 5 - ui/lib/core/addon/components/form-field.hbs | 14 +- ui/lib/core/addon/components/form-field.js | 10 - .../addon/components/form-save-buttons.js | 5 - ui/lib/core/addon/components/icon.hbs | 7 - ui/lib/core/addon/components/icon.js | 41 +- .../components/info-table-item-array.hbs | 4 +- .../addon/components/info-table-item-array.js | 5 - .../core/addon/components/info-table-row.hbs | 2 +- .../core/addon/components/info-table-row.js | 43 +- ui/lib/core/addon/components/info-table.js | 20 +- ui/lib/core/addon/components/info-tooltip.hbs | 24 - ui/lib/core/addon/components/info-tooltip.js | 39 +- ui/lib/core/addon/components/input-search.js | 5 - ui/lib/core/addon/components/json-editor.js | 5 - .../core/addon/components/key-value-header.js | 7 +- .../core/addon/components/kv-object-editor.js | 5 - .../core/addon/components/layout-loading.js | 17 + ui/lib/core/addon/components/linkable-item.js | 23 + .../addon/components/linkable-item/content.js | 31 + .../addon/components/linkable-item/menu.js | 22 + ui/lib/core/addon/components/linked-block.js | 5 - ui/lib/core/addon/components/list-item.js | 5 - .../addon/components/list-item/content.js | 5 - .../addon/components/list-item/popup-menu.js | 5 - .../core/addon/components/list-pagination.js | 7 - ui/lib/core/addon/components/list-view.js | 5 - ui/lib/core/addon/components/masked-input.hbs | 58 - ui/lib/core/addon/components/masked-input.js | 98 +- ui/lib/core/addon/components/menu-loader.hbs | 9 - ui/lib/core/addon/components/menu-loader.js | 22 + ui/lib/core/addon/components/message-error.js | 5 - ui/lib/core/addon/components/modal.hbs | 8 +- ui/lib/core/addon/components/modal.js | 5 - .../addon/components/namespace-reminder.js | 39 +- .../core/addon/components/navigate-input.hbs | 20 - .../core/addon/components/navigate-input.js | 191 +- .../addon/components/object-list-input.js | 5 - .../core/addon/components/overview-card.hbs | 2 +- .../components/page-header-level-left.js | 5 + .../components/page-header-level-right.js | 5 + .../addon/components/page-header-level.hbs | 2 - .../core/addon/components/page-header-top.js | 5 + ui/lib/core/addon/components/page-header.hbs | 15 - ui/lib/core/addon/components/page-header.js | 18 +- .../core/addon/components/page/breadcrumbs.js | 5 - ui/lib/core/addon/components/popup-menu.hbs | 16 - 
ui/lib/core/addon/components/popup-menu.js | 21 + ui/lib/core/addon/components/read-more.js | 5 - .../addon/components/readonly-form-field.js | 5 - .../components/replication-action-demote.js | 5 - .../components/replication-action-disable.js | 5 - .../replication-action-generate-token.js | 5 - .../components/replication-action-promote.js | 5 - .../components/replication-action-recover.js | 5 - .../components/replication-action-reindex.js | 5 - .../replication-action-update-primary.js | 5 - .../components/replication-actions-single.js | 5 - .../addon/components/replication-actions.js | 5 - .../addon/components/replication-dashboard.js | 5 - .../addon/components/replication-header.js | 5 - .../components/replication-mode-summary.js | 5 - .../core/addon/components/replication-page.js | 5 - .../components/replication-secondary-card.js | 5 - .../components/replication-summary-card.js | 5 - .../components/replication-table-rows.js | 5 - .../components/search-select-placeholder.hbs | 8 - .../components/search-select-placeholder.js | 6 + .../components/search-select-with-modal.js | 5 - .../core/addon/components/search-select.hbs | 9 +- ui/lib/core/addon/components/search-select.js | 23 +- .../components/secret-list-header-tab.js | 5 - .../addon/components/secret-list-header.hbs | 87 + .../addon/components/secret-list-header.js | 38 +- ui/lib/core/addon/components/select.js | 5 - ui/lib/core/addon/components/shamir-flow.js | 5 - .../addon/components/shamir-modal-flow.js | 5 - ui/lib/core/addon/components/string-list.hbs | 19 +- ui/lib/core/addon/components/string-list.js | 28 +- ui/lib/core/addon/components/text-file.hbs | 2 +- ui/lib/core/addon/components/text-file.js | 5 - ui/lib/core/addon/components/toggle-button.js | 5 - ui/lib/core/addon/components/toggle.js | 5 - ui/lib/core/addon/components/tool-tip.js | 5 - .../core/addon/components/toolbar-actions.js | 25 + .../core/addon/components/toolbar-filters.js | 26 + ui/lib/core/addon/components/toolbar-link.js | 5 - ui/lib/core/addon/components/toolbar.js | 24 + ui/lib/core/addon/components/ttl-picker.js | 24 +- ui/lib/core/addon/components/upgrade-page.js | 30 +- .../addon/components/vault-logo-spinner.js | 17 + ui/lib/core/addon/decorators/confirm-leave.js | 5 - .../core/addon/helpers/changelog-url-for.js | 5 - ui/lib/core/addon/helpers/cluster-states.js | 5 - ui/lib/core/addon/helpers/date-format.js | 5 - ui/lib/core/addon/helpers/format-duration.js | 36 +- ui/lib/core/addon/helpers/format-number.js | 5 - ui/lib/core/addon/helpers/has-feature.js | 5 - ui/lib/core/addon/helpers/img-path.js | 5 - ui/lib/core/addon/helpers/is-active-route.js | 5 - ui/lib/core/addon/helpers/is-version.js | 5 - ui/lib/core/addon/helpers/loose-equal.js | 5 - ui/lib/core/addon/helpers/message-types.js | 10 - .../core/addon/helpers/options-for-backend.js | 44 +- ui/lib/core/addon/helpers/path-or-array.js | 5 - .../helpers/replication-action-for-mode.js | 5 - .../helpers/replication-mode-description.js | 5 - .../core/addon/helpers/set-flash-message.js | 5 - ui/lib/core/addon/mixins/list-controller.js | 5 - ui/lib/core/addon/mixins/list-route.js | 5 - .../core/addon/mixins/replication-actions.js | 5 - ui/lib/core/addon/modifiers/code-mirror.js | 5 - .../{ => templates}/components/box-radio.hbs | 0 .../{ => templates}/components/chevron.hbs | 0 .../components/empty-state.hbs | 31 +- .../components/field-group-show.hbs | 6 +- .../{ => templates}/components/form-error.hbs | 0 .../core/addon/templates/components/icon.hbs | 7 + .../{ => templates}/components/info-table.hbs 
| 4 +- .../templates/components/info-tooltip.hbs | 20 + .../components/layout-loading.hbs | 0 .../templates/components/linkable-item.hbs | 17 + .../components/linkable-item/content.hbs | 44 + .../components/linkable-item/menu.hbs | 3 + .../templates/components/masked-input.hbs | 54 + .../templates/components/menu-loader.hbs | 7 + .../components/namespace-reminder.hbs | 2 +- .../templates/components/navigate-input.hbs | 20 + .../templates/components/page-header.hbs | 15 + .../addon/templates/components/popup-menu.hbs | 14 + .../addon/templates/components/read-more.hbs | 2 +- .../components/replication-header.hbs | 25 +- .../components/replication-mode-summary.hbs | 7 +- .../components/replication-secondary-card.hbs | 2 +- .../components/replication-summary-card.hbs | 2 +- .../components/search-select-placeholder.hbs | 6 + .../templates/components/shamir-flow.hbs | 4 +- .../components/shamir-modal-flow.hbs | 7 +- .../components/toolbar-actions.hbs | 0 .../components/toolbar-filters.hbs | 0 .../{ => templates}/components/toolbar.hbs | 0 .../components/upgrade-page.hbs | 0 .../components/vault-logo-spinner.hbs | 0 ui/lib/core/addon/utils/b64.js | 5 - ui/lib/core/addon/utils/client-count-utils.js | 5 - ui/lib/core/addon/utils/common-prefix.js | 5 - ui/lib/core/addon/utils/date-formatters.js | 5 - ui/lib/core/addon/utils/duration-utils.ts | 19 - ui/lib/core/addon/utils/parse-url.js | 5 - .../addon/utils/search-select-has-many.js | 5 - ui/lib/core/addon/utils/timestamp.js | 15 - ui/lib/core/app/components/alert-banner.js | 5 - ui/lib/core/app/components/alert-inline.js | 5 - ui/lib/core/app/components/alert-popup.js | 1 - .../core/app/components/autocomplete-input.js | 5 - ui/lib/core/app/components/box-radio.js | 5 - ui/lib/core/app/components/checkbox-grid.js | 5 - ui/lib/core/app/components/chevron.js | 5 - ui/lib/core/app/components/confirm-action.js | 5 - ui/lib/core/app/components/confirm.js | 5 - ui/lib/core/app/components/confirm/message.js | 5 - .../core/app/components/confirmation-modal.js | 5 - ui/lib/core/app/components/doc-link.js | 5 - ui/lib/core/app/components/download-button.js | 5 - ui/lib/core/app/components/edit-form.js | 5 - ui/lib/core/app/components/empty-state.js | 5 - ui/lib/core/app/components/external-link.js | 5 - .../core/app/components/field-group-show.js | 5 - ui/lib/core/app/components/form-error.js | 5 - .../app/components/form-field-groups-loop.js | 5 - .../core/app/components/form-field-groups.js | 5 - .../core/app/components/form-field-label.js | 5 - ui/lib/core/app/components/form-field.js | 5 - .../core/app/components/form-save-buttons.js | 5 - ui/lib/core/app/components/icon.js | 5 - .../app/components/info-table-item-array.js | 5 - ui/lib/core/app/components/info-table-row.js | 5 - ui/lib/core/app/components/info-table.js | 5 - ui/lib/core/app/components/info-tooltip.js | 5 - ui/lib/core/app/components/input-search.js | 5 - ui/lib/core/app/components/json-editor.js | 5 - .../core/app/components/key-value-header.js | 5 - .../core/app/components/kv-object-editor.js | 5 - ui/lib/core/app/components/layout-loading.js | 5 - ui/lib/core/app/components/linkable-item.js | 1 + .../app/components/linkable-item/content.js | 1 + .../core/app/components/linkable-item/menu.js | 1 + ui/lib/core/app/components/linked-block.js | 5 - ui/lib/core/app/components/list-item.js | 5 - .../core/app/components/list-item/content.js | 5 - .../app/components/list-item/popup-menu.js | 5 - ui/lib/core/app/components/list-pagination.js | 5 - ui/lib/core/app/components/list-view.js | 5 - 
ui/lib/core/app/components/masked-input.js | 5 - ui/lib/core/app/components/menu-loader.js | 5 - ui/lib/core/app/components/message-error.js | 5 - ui/lib/core/app/components/modal.js | 5 - .../core/app/components/namespace-reminder.js | 5 - ui/lib/core/app/components/navigate-input.js | 5 - .../core/app/components/object-list-input.js | 5 - ui/lib/core/app/components/overview-card.js | 5 - .../app/components/page-header-level-left.js | 1 + .../app/components/page-header-level-right.js | 1 + .../core/app/components/page-header-level.js | 1 - ui/lib/core/app/components/page-header-top.js | 1 + ui/lib/core/app/components/page-header.js | 5 - .../core/app/components/page/breadcrumbs.js | 5 - ui/lib/core/app/components/page/error.js | 5 - ui/lib/core/app/components/popup-menu.js | 5 - ui/lib/core/app/components/radio-button.js | 5 - ui/lib/core/app/components/radio-card.js | 5 - ui/lib/core/app/components/read-more.js | 5 - .../app/components/readonly-form-field.js | 5 - .../components/replication-action-demote.js | 5 - .../components/replication-action-disable.js | 5 - .../replication-action-generate-token.js | 5 - .../components/replication-action-promote.js | 5 - .../components/replication-action-recover.js | 5 - .../components/replication-action-reindex.js | 5 - .../replication-action-update-primary.js | 5 - .../components/replication-actions-single.js | 5 - .../app/components/replication-actions.js | 5 - .../app/components/replication-dashboard.js | 5 - .../core/app/components/replication-header.js | 5 - .../components/replication-mode-summary.js | 5 - .../core/app/components/replication-page.js | 5 - .../components/replication-secondary-card.js | 5 - .../components/replication-summary-card.js | 5 - .../app/components/replication-table-rows.js | 5 - .../components/search-select-placeholder.js | 5 - .../components/search-select-with-modal.js | 5 - ui/lib/core/app/components/search-select.js | 5 - .../app/components/secret-list-header-tab.js | 5 - .../core/app/components/secret-list-header.js | 5 - ui/lib/core/app/components/select.js | 5 - ui/lib/core/app/components/shamir-flow.js | 5 - .../core/app/components/shamir-modal-flow.js | 5 - ui/lib/core/app/components/stat-text.js | 5 - ui/lib/core/app/components/string-list.js | 5 - ui/lib/core/app/components/text-file.js | 5 - ui/lib/core/app/components/toggle-button.js | 5 - ui/lib/core/app/components/toggle.js | 5 - ui/lib/core/app/components/tool-tip.js | 5 - ui/lib/core/app/components/toolbar-actions.js | 5 - ui/lib/core/app/components/toolbar-filters.js | 5 - ui/lib/core/app/components/toolbar-link.js | 5 - ui/lib/core/app/components/toolbar.js | 5 - ui/lib/core/app/components/ttl-picker.js | 5 - ui/lib/core/app/components/upgrade-page.js | 5 - .../core/app/components/vault-logo-spinner.js | 5 - ui/lib/core/app/decorators/confirm-leave.js | 5 - ui/lib/core/app/helpers/changelog-url-for.js | 5 - ui/lib/core/app/helpers/cluster-states.js | 5 - ui/lib/core/app/helpers/date-format.js | 5 - ui/lib/core/app/helpers/format-duration.js | 5 - ui/lib/core/app/helpers/format-number.js | 5 - ui/lib/core/app/helpers/has-feature.js | 5 - ui/lib/core/app/helpers/img-path.js | 5 - ui/lib/core/app/helpers/is-active-route.js | 5 - ui/lib/core/app/helpers/is-version.js | 5 - ui/lib/core/app/helpers/loose-equal.js | 5 - ui/lib/core/app/helpers/message-types.js | 5 - .../core/app/helpers/options-for-backend.js | 5 - ui/lib/core/app/helpers/path-or-array.js | 5 - .../helpers/replication-action-for-mode.js | 5 - .../helpers/replication-mode-description.js | 5 - 
ui/lib/core/app/helpers/set-flash-message.js | 5 - ui/lib/core/app/mixins/list-controller.js | 5 - ui/lib/core/app/mixins/list-route.js | 5 - ui/lib/core/app/mixins/replication-actions.js | 5 - ui/lib/core/app/modifiers/code-mirror.js | 5 - ui/lib/core/app/utils/b64.js | 5 - ui/lib/core/app/utils/timestamp.js | 1 - ui/lib/core/icon-mappings.js | 5 - ui/lib/core/index.js | 5 - ui/lib/css/index.js | 28 +- ui/lib/keep-gitkeep/index.js | 5 - .../addon/components/edit-form-kmip-role.js | 10 - ui/lib/kmip/addon/components/header-scope.js | 5 - .../kmip/addon/components/kmip-breadcrumb.js | 5 - .../components/operation-field-display.js | 5 - .../addon/controllers/credentials/index.js | 5 - .../addon/controllers/credentials/show.js | 5 - ui/lib/kmip/addon/controllers/role.js | 5 - ui/lib/kmip/addon/controllers/scope/roles.js | 5 - ui/lib/kmip/addon/controllers/scopes/index.js | 5 - ui/lib/kmip/addon/engine.js | 5 - ui/lib/kmip/addon/resolver.js | 5 - ui/lib/kmip/addon/routes.js | 5 - ui/lib/kmip/addon/routes/configuration.js | 5 - ui/lib/kmip/addon/routes/configure.js | 5 - .../kmip/addon/routes/credentials/generate.js | 5 - ui/lib/kmip/addon/routes/credentials/index.js | 5 - ui/lib/kmip/addon/routes/credentials/show.js | 5 - ui/lib/kmip/addon/routes/role.js | 5 - ui/lib/kmip/addon/routes/role/edit.js | 5 - ui/lib/kmip/addon/routes/scope/roles.js | 5 - .../kmip/addon/routes/scope/roles/create.js | 5 - ui/lib/kmip/addon/routes/scopes/create.js | 5 - ui/lib/kmip/addon/routes/scopes/index.js | 5 - .../components/edit-form-kmip-role.hbs | 2 +- ui/lib/kmip/config/environment.js | 5 - ui/lib/kmip/index.js | 5 - .../addon/components/page/configuration.hbs | 4 +- .../addon/components/page/configure.hbs | 6 +- .../addon/components/page/configure.js | 5 - .../addon/components/page/credentials.js | 9 +- .../addon/components/page/overview.js | 5 - .../components/page/role/create-and-edit.js | 12 +- .../addon/components/page/role/details.js | 5 - .../kubernetes/addon/components/page/roles.js | 5 - .../addon/controllers/roles/index.js | 5 - .../addon/decorators/fetch-config.js | 5 - ui/lib/kubernetes/addon/engine.js | 7 +- ui/lib/kubernetes/addon/routes.js | 5 - .../kubernetes/addon/routes/configuration.js | 5 - ui/lib/kubernetes/addon/routes/configure.js | 5 - ui/lib/kubernetes/addon/routes/error.js | 5 - ui/lib/kubernetes/addon/routes/index.js | 5 - ui/lib/kubernetes/addon/routes/overview.js | 5 - .../kubernetes/addon/routes/roles/create.js | 5 - ui/lib/kubernetes/addon/routes/roles/index.js | 5 - .../addon/routes/roles/role/credentials.js | 5 - .../addon/routes/roles/role/details.js | 5 - .../addon/routes/roles/role/edit.js | 5 - .../addon/routes/roles/role/index.js | 5 - .../addon/utils/generated-role-rules.js | 5 - ui/lib/kubernetes/config/environment.js | 5 - ui/lib/kubernetes/index.js | 5 - .../addon/components/swagger-ui.js | 5 - .../addon/controllers/index.js | 5 - ui/lib/open-api-explorer/addon/engine.js | 5 - ui/lib/open-api-explorer/addon/resolver.js | 5 - ui/lib/open-api-explorer/addon/routes.js | 5 - .../open-api-explorer/addon/routes/index.js | 5 - .../open-api-explorer/addon/styles/addon.css | 5 - .../addon/templates/components/swagger-ui.hbs | 5 +- .../open-api-explorer/config/environment.js | 5 - ui/lib/open-api-explorer/index.js | 5 - ui/lib/pki/README.md | 58 +- .../page/pki-certificate-details.hbs | 2 - .../page/pki-certificate-details.ts | 11 +- .../page/pki-configuration-details.hbs | 146 - .../page/pki-configuration-details.ts | 44 - .../page/pki-configuration-edit.hbs | 144 - 
.../components/page/pki-configuration-edit.ts | 87 - .../components/page/pki-configure-create.hbs | 95 - .../components/page/pki-issuer-details.hbs | 88 +- .../components/page/pki-issuer-details.ts | 23 - .../addon/components/page/pki-issuer-edit.ts | 11 +- .../page/pki-issuer-generate-intermediate.hbs | 20 - .../page/pki-issuer-generate-intermediate.ts | 11 - .../page/pki-issuer-generate-root.hbs | 30 - .../page/pki-issuer-generate-root.ts | 11 - .../components/page/pki-issuer-import.hbs | 21 - .../components/page/pki-issuer-import.ts | 11 - .../addon/components/page/pki-issuer-list.hbs | 65 - .../page/pki-issuer-rotate-root.hbs | 189 - .../components/page/pki-issuer-rotate-root.ts | 118 - .../addon/components/page/pki-key-details.hbs | 1 - .../addon/components/page/pki-key-details.ts | 13 +- .../addon/components/page/pki-overview.hbs | 32 +- .../pki/addon/components/page/pki-overview.ts | 31 +- .../components/page/pki-role-details.hbs | 12 +- .../addon/components/page/pki-role-details.ts | 15 +- .../page/pki-tidy-auto-configure.hbs | 18 - .../page/pki-tidy-auto-settings.hbs | 40 - .../addon/components/page/pki-tidy-manual.hbs | 18 - .../addon/components/page/pki-tidy-status.hbs | 193 - .../addon/components/page/pki-tidy-status.ts | 157 - .../parsed-certificate-info-rows.hbs | 9 - .../parsed-certificate-info-rows.ts | 54 - .../components/pki-ca-certificate-import.hbs | 36 + .../components/pki-ca-certificate-import.ts | 62 + .../addon/components/pki-configure-form.hbs | 67 + ...figure-create.ts => pki-configure-form.ts} | 26 +- .../pki/addon/components/pki-generate-csr.hbs | 93 +- .../pki/addon/components/pki-generate-csr.ts | 46 +- .../addon/components/pki-generate-root.hbs | 176 +- .../pki/addon/components/pki-generate-root.js | 89 + .../pki/addon/components/pki-generate-root.ts | 130 - .../components/pki-generate-toggle-groups.hbs | 10 +- .../components/pki-generate-toggle-groups.ts | 11 +- .../components/pki-import-pem-bundle.hbs | 89 - .../addon/components/pki-import-pem-bundle.ts | 107 - .../addon/components/pki-info-table-rows.hbs | 26 - .../addon/components/pki-issuer-cross-sign.js | 14 +- .../{pki-key-form.ts => pki-key-form.js} | 27 +- .../{pki-key-import.ts => pki-key-import.js} | 30 +- .../addon/components/pki-key-parameters.js | 42 + .../addon/components/pki-key-parameters.ts | 66 - ui/lib/pki/addon/components/pki-key-usage.hbs | 6 +- ui/lib/pki/addon/components/pki-key-usage.ts | 16 +- .../components/pki-not-valid-after-form.hbs | 6 +- .../components/pki-not-valid-after-form.ts | 7 +- ui/lib/pki/addon/components/pki-role-form.hbs | 75 +- ui/lib/pki/addon/components/pki-role-form.js | 61 + ui/lib/pki/addon/components/pki-role-form.ts | 90 - .../addon/components/pki-role-generate.hbs | 2 +- .../pki/addon/components/pki-role-generate.ts | 17 +- .../components/pki-sign-intermediate-form.hbs | 3 +- .../components/pki-sign-intermediate-form.ts | 18 +- ui/lib/pki/addon/components/pki-tidy-form.hbs | 80 - ui/lib/pki/addon/components/pki-tidy-form.ts | 64 - .../addon/controllers/certificates/index.js | 5 - ui/lib/pki/addon/controllers/issuers/index.js | 5 - ui/lib/pki/addon/controllers/keys/index.js | 5 - ui/lib/pki/addon/controllers/roles/index.js | 5 - .../addon/controllers/roles/role/generate.js | 5 - .../pki/addon/controllers/roles/role/sign.js | 5 - ui/lib/pki/addon/controllers/tidy/index.js | 37 - ui/lib/pki/addon/decorators/check-config.js | 43 - ui/lib/pki/addon/engine.js | 7 +- ui/lib/pki/addon/routes.js | 36 +- ui/lib/pki/addon/routes/application.js | 24 +- 
.../certificates/certificate/details.js | 5 - ui/lib/pki/addon/routes/certificates/index.js | 22 +- ui/lib/pki/addon/routes/configuration.js | 22 +- .../pki/addon/routes/configuration/create.js | 21 +- ui/lib/pki/addon/routes/configuration/edit.js | 33 +- .../pki/addon/routes/configuration/index.js | 34 +- ui/lib/pki/addon/routes/configuration/tidy.js | 3 + ui/lib/pki/addon/routes/error.js | 9 +- ui/lib/pki/addon/routes/index.js | 5 - .../routes/issuers/generate-intermediate.js | 20 +- .../pki/addon/routes/issuers/generate-root.js | 5 - ui/lib/pki/addon/routes/issuers/import.js | 19 +- ui/lib/pki/addon/routes/issuers/index.js | 19 +- .../addon/routes/issuers/issuer/cross-sign.js | 27 +- .../addon/routes/issuers/issuer/details.js | 44 +- .../pki/addon/routes/issuers/issuer/edit.js | 13 +- .../issuers/{issuer.js => issuer/index.js} | 14 +- .../routes/issuers/issuer/rotate-root.js | 48 - .../pki/addon/routes/issuers/issuer/sign.js | 11 +- ui/lib/pki/addon/routes/keys/create.js | 17 +- ui/lib/pki/addon/routes/keys/import.js | 17 +- ui/lib/pki/addon/routes/keys/index.js | 24 +- ui/lib/pki/addon/routes/keys/key.js | 13 +- ui/lib/pki/addon/routes/keys/key/details.js | 22 +- ui/lib/pki/addon/routes/keys/key/edit.js | 23 +- ui/lib/pki/addon/routes/overview.js | 44 +- ui/lib/pki/addon/routes/roles/create.js | 20 +- ui/lib/pki/addon/routes/roles/index.js | 23 +- ui/lib/pki/addon/routes/roles/role/details.js | 5 - ui/lib/pki/addon/routes/roles/role/edit.js | 29 +- .../pki/addon/routes/roles/role/generate.js | 12 +- ui/lib/pki/addon/routes/roles/role/sign.js | 12 +- ui/lib/pki/addon/routes/tidy.js | 18 - ui/lib/pki/addon/routes/tidy/auto.js | 8 - .../pki/addon/routes/tidy/auto/configure.js | 23 - ui/lib/pki/addon/routes/tidy/auto/index.js | 20 - ui/lib/pki/addon/routes/tidy/index.js | 47 - ui/lib/pki/addon/routes/tidy/manual.js | 29 - .../addon/templates/certificates/index.hbs | 4 +- .../addon/templates/configuration/create.hbs | 14 +- .../addon/templates/configuration/edit.hbs | 20 +- .../addon/templates/configuration/index.hbs | 39 +- .../addon/templates/configuration/tidy.hbs | 1 + .../issuers/generate-intermediate.hbs | 17 +- .../addon/templates/issuers/generate-root.hbs | 17 +- ui/lib/pki/addon/templates/issuers/import.hbs | 18 +- ui/lib/pki/addon/templates/issuers/index.hbs | 50 +- .../templates/issuers/issuer/cross-sign.hbs | 2 +- .../templates/issuers/issuer/details.hbs | 15 +- .../templates/issuers/issuer/rotate-root.hbs | 8 - ui/lib/pki/addon/templates/keys/index.hbs | 4 +- ui/lib/pki/addon/templates/overview.hbs | 4 +- ui/lib/pki/addon/templates/roles/create.hbs | 4 +- ui/lib/pki/addon/templates/roles/index.hbs | 18 +- .../pki/addon/templates/roles/role/edit.hbs | 7 +- ui/lib/pki/addon/templates/tidy.hbs | 1 - ui/lib/pki/addon/templates/tidy/auto.hbs | 1 - .../addon/templates/tidy/auto/configure.hbs | 1 - .../pki/addon/templates/tidy/auto/index.hbs | 1 - ui/lib/pki/addon/templates/tidy/index.hbs | 20 - ui/lib/pki/addon/templates/tidy/manual.hbs | 1 - ui/lib/pki/addon/utils/action-params.js | 7 - ui/lib/pki/app/utils/action-params.js | 5 - ui/lib/pki/config/environment.js | 5 - ui/lib/pki/index.js | 5 - ui/lib/pki/package.json | 1 - ui/lib/replication/addon/.eslintrc.js | 5 - .../components/known-secondaries-card.js | 5 - .../components/known-secondaries-table.js | 5 - .../components/path-filter-config-list.js | 5 - .../components/replication-primary-card.js | 5 - .../addon/components/replication-summary.js | 5 - .../addon/controllers/application.js | 5 - 
ui/lib/replication/addon/controllers/index.js | 5 - ui/lib/replication/addon/controllers/mode.js | 5 - .../addon/controllers/mode/index.js | 5 - .../addon/controllers/mode/manage.js | 5 - .../addon/controllers/mode/secondaries.js | 5 - .../addon/controllers/mode/secondaries/add.js | 5 - .../mode/secondaries/config-create.js | 5 - .../mode/secondaries/config-edit.js | 5 - .../mode/secondaries/config-show.js | 5 - .../controllers/mode/secondaries/index.js | 5 - .../controllers/mode/secondaries/revoke.js | 5 - .../addon/controllers/replication-mode.js | 5 - ui/lib/replication/addon/engine.js | 5 - ui/lib/replication/addon/resolver.js | 5 - ui/lib/replication/addon/routes.js | 5 - .../replication/addon/routes/application.js | 8 +- ui/lib/replication/addon/routes/index.js | 5 - ui/lib/replication/addon/routes/mode.js | 5 - ui/lib/replication/addon/routes/mode/index.js | 5 - .../replication/addon/routes/mode/manage.js | 5 - .../addon/routes/mode/secondaries.js | 5 - .../addon/routes/mode/secondaries/add.js | 5 - .../routes/mode/secondaries/config-create.js | 5 - .../routes/mode/secondaries/config-edit.js | 5 - .../routes/mode/secondaries/config-show.js | 5 - .../addon/routes/mode/secondaries/revoke.js | 5 - .../addon/routes/replication-base.js | 5 - .../components/replication-primary-card.hbs | 2 +- .../components/replication-summary.hbs | 2 +- ui/lib/replication/addon/templates/index.hbs | 1 + ui/lib/replication/addon/templates/mode.hbs | 1 + .../addon/utils/decode-config-from-jwt.js | 5 - ui/lib/replication/config/environment.js | 5 - ui/lib/replication/index.js | 5 - .../index.js | 5 - .../service-worker-registration/index.js | 5 - .../service-worker/index.js | 5 - ui/mirage/config.js | 5 - ui/mirage/factories/configuration.js | 5 - ui/mirage/factories/feature.js | 5 - ui/mirage/factories/kubernetes-config.js | 5 - ui/mirage/factories/kubernetes-role.js | 5 - ui/mirage/factories/mfa-duo-method.js | 5 - ui/mirage/factories/mfa-login-enforcement.js | 5 - ui/mirage/factories/mfa-method.js | 5 - ui/mirage/factories/mfa-okta-method.js | 5 - ui/mirage/factories/mfa-pingid-method.js | 5 - ui/mirage/factories/mfa-totp-method.js | 5 - ui/mirage/factories/secret-engine.js | 5 - ui/mirage/factories/server.js | 5 - ui/mirage/handlers/base.js | 5 - ui/mirage/handlers/clients.js | 8 +- ui/mirage/handlers/db.js | 5 - ui/mirage/handlers/hcp-link.js | 27 +- ui/mirage/handlers/index.js | 5 - ui/mirage/handlers/kms.js | 5 - ui/mirage/handlers/kubernetes.js | 5 - ui/mirage/handlers/mfa-config.js | 5 - ui/mirage/handlers/mfa-login.js | 5 - ui/mirage/handlers/oidc-config.js | 5 - .../helpers/modify-passthrough-response.js | 22 - ui/mirage/identity-managers/application.js | 5 - ui/mirage/models/feature.js | 5 - ui/mirage/scenarios/default.js | 5 - ui/mirage/scenarios/kubernetes.js | 5 - ui/mirage/serializers/application.js | 5 - ui/package.json | 36 +- ui/public/images/pki-rotate-root.png | Bin 60199 -> 0 bytes ui/public/images/pki-tidy.png | Bin 31153 -> 0 bytes ui/scripts/codemods/dropdown-transform.js | 5 - ui/scripts/codemods/icon-transform.js | 5 - ui/scripts/codemods/inject-service.js | 5 - .../codemods/jscodeshift-babylon-parser.js | 5 - .../codemods/linkto-with-on-modifier.js | 5 - .../codemods/no-quoteless-attributes.js | 5 - .../codemods/transform-deprecated-args.js | 5 - ui/scripts/enos-test-ember.js | 5 - ui/scripts/gen-story-md.js | 5 - ui/scripts/list-templates.js | 7 +- ui/scripts/start-vault.js | 5 - ui/scripts/test-helper.js | 5 - ui/testem.enos.js | 5 - ui/testem.js | 5 - ui/tests/.eslintrc.js | 5 
- .../access/identity/_shared-alias-tests.js | 5 - .../access/identity/_shared-tests.js | 5 - .../identity/entities/aliases/create-test.js | 5 - .../access/identity/entities/create-test.js | 5 - .../access/identity/entities/index-test.js | 5 - .../identity/groups/aliases/create-test.js | 5 - .../access/identity/groups/create-test.js | 5 - ui/tests/acceptance/access/methods-test.js | 62 +- .../access/namespaces/index-test.js | 5 - .../acceptance/api-explorer/index-test.js | 5 - ui/tests/acceptance/auth-list-test.js | 33 +- ui/tests/acceptance/auth-test.js | 32 +- ui/tests/acceptance/aws-test.js | 25 +- ui/tests/acceptance/client-dashboard-test.js | 65 +- ui/tests/acceptance/cluster-test.js | 47 +- ui/tests/acceptance/console-test.js | 34 +- .../enterprise-control-groups-test.js | 5 - ui/tests/acceptance/enterprise-kmip-test.js | 5 - ui/tests/acceptance/enterprise-kmse-test.js | 5 - .../enterprise-license-banner-test.js | 30 +- .../acceptance/enterprise-namespaces-test.js | 33 +- .../acceptance/enterprise-replication-test.js | 10 +- .../acceptance/enterprise-sidebar-nav-test.js | 67 - .../acceptance/enterprise-transform-test.js | 5 - ui/tests/acceptance/init-test.js | 5 - ui/tests/acceptance/jwt-auth-method-test.js | 5 - ui/tests/acceptance/leases-test.js | 12 +- ui/tests/acceptance/managed-namespace-test.js | 5 - .../acceptance/mfa-login-enforcement-test.js | 7 +- ui/tests/acceptance/mfa-login-test.js | 5 - ui/tests/acceptance/mfa-method-test.js | 7 +- ui/tests/acceptance/mfa-setup-test.js | 24 +- ui/tests/acceptance/not-found-test.js | 20 +- ui/tests/acceptance/oidc-auth-method-test.js | 10 +- .../oidc-config/clients-assignments-test.js | 5 - .../oidc-config/clients-keys-test.js | 5 - .../oidc-config/providers-scopes-test.js | 5 - ui/tests/acceptance/oidc-provider-test.js | 20 +- ... 
pki-engine-route-cleanup-test.js-test.js} | 62 +- .../acceptance/pki/pki-action-forms-test.js | 298 - .../acceptance/pki/pki-configuration-test.js | 197 - .../acceptance/pki/pki-cross-sign-test.js | 111 - .../pki/pki-engine-workflow-test.js | 250 +- ui/tests/acceptance/pki/pki-overview-test.js | 14 - ui/tests/acceptance/pki/pki-tidy-test.js | 181 - ui/tests/acceptance/policies-acl-old-test.js | 14 +- ui/tests/acceptance/policies-test.js | 5 - ui/tests/acceptance/policies/index-test.js | 14 +- ui/tests/acceptance/policy-test.js | 5 - ui/tests/acceptance/policy/edit-test.js | 5 - ui/tests/acceptance/policy/show-test.js | 5 - ui/tests/acceptance/raft-storage-test.js | 5 - ui/tests/acceptance/redirect-to-test.js | 5 - .../secrets/backend/alicloud/secret-test.js | 10 +- .../secrets/backend/cubbyhole/secret-test.js | 11 +- .../secrets/backend/database/secret-test.js | 13 +- .../secrets/backend/engines-test.js | 81 +- .../secrets/backend/gcpkms/secrets-test.js | 10 +- .../secrets/backend/generic/secret-test.js | 16 +- .../backend/kubernetes/configuration-test.js | 5 - .../backend/kubernetes/credentials-test.js | 5 - .../backend/kubernetes/overview-test.js | 5 - .../secrets/backend/kubernetes/roles-test.js | 7 +- .../secrets/backend/kv/diff-test.js | 5 - .../secrets/backend/kv/secret-test.js | 73 +- .../secrets/backend/pki/cert-test.js | 99 + .../secrets/backend/pki/list-test.js | 59 + .../secrets/backend/pki/role-test.js | 81 + .../secrets/backend/ssh/role-test.js | 18 +- ui/tests/acceptance/settings-test.js | 11 +- .../settings/auth/configure/index-test.js | 12 +- .../settings/auth/configure/section-test.js | 12 +- .../acceptance/settings/auth/enable-test.js | 10 +- .../configure-ssh-secret-test.js | 14 +- .../pki/index-test.js | 27 + .../pki/section-cert-test.js | 143 + .../pki/section-crl-test.js | 28 + .../pki/section-tidy-test.js | 28 + .../pki/section-urls-test.js | 33 + .../settings/mount-secret-backend-test.js | 63 +- ui/tests/acceptance/sidebar-nav-test.js | 122 - ui/tests/acceptance/ssh-test.js | 31 +- ui/tests/acceptance/tools-test.js | 22 +- ui/tests/acceptance/transit-test.js | 24 +- ui/tests/acceptance/unseal-test.js | 5 - ui/tests/acceptance/wrapped-token-test.js | 5 - ui/tests/helpers/clients.js | 7 +- ui/tests/helpers/codemirror.js | 5 - ui/tests/helpers/components/sidebar-nav.js | 23 - ui/tests/helpers/components/ttl-picker.js | 5 - ui/tests/helpers/flash-message.js | 5 - ui/tests/helpers/index.js | 5 - ui/tests/helpers/kubernetes/overview.js | 5 - ui/tests/helpers/mirage-to-models.js | 5 - ui/tests/helpers/noop-all-api-requests.js | 5 - ui/tests/helpers/oidc-config.js | 5 - ui/tests/helpers/oidc-window-stub.js | 5 - ui/tests/helpers/pki.js | 5 - ui/tests/helpers/pki/overview.js | 8 - .../pki/page/pki-configuration-edit.js | 25 - ui/tests/helpers/pki/page/pki-keys.js | 5 - ui/tests/helpers/pki/page/pki-role-details.js | 5 - ui/tests/helpers/pki/page/pki-tidy-form.js | 18 - ui/tests/helpers/pki/page/pki-tidy.js | 30 - ui/tests/helpers/pki/pki-configure-create.js | 30 - ui/tests/helpers/pki/pki-configure-form.js | 13 + .../helpers/pki/pki-delete-all-issuers.js | 11 - ui/tests/helpers/pki/pki-generate-root.js | 18 - ui/tests/helpers/pki/pki-issuer-cross-sign.js | 23 - ui/tests/helpers/pki/pki-issuer-details.js | 18 +- ui/tests/helpers/pki/pki-key-form.js | 5 - .../helpers/pki/pki-not-valid-after-form.js | 5 - ui/tests/helpers/pki/pki-role-form.js | 7 - ui/tests/helpers/pki/pki-role-generate.js | 5 - ui/tests/helpers/pki/pki-run-commands.js | 5 - ui/tests/helpers/pki/values.js | 34 
- ui/tests/helpers/pki/workflow.js | 20 +- ui/tests/helpers/policy-generator/pki.js | 5 - ui/tests/helpers/poll-cluster.js | 5 - ui/tests/helpers/stubs.js | 5 - ui/tests/helpers/wait-for-error.js | 5 - ui/tests/index.html | 5 - .../components/alert-inline-test.js | 5 - .../components/alert-popup-test.js | 58 - .../auth-config-form/options-test.js | 5 - .../integration/components/auth-form-test.js | 7 +- .../integration/components/auth-jwt-test.js | 5 - .../components/autocomplete-input-test.js | 5 - .../integration/components/b64-toggle-test.js | 5 - .../integration/components/box-radio-test.js | 5 - .../components/calendar-widget-test.js | 244 +- .../components/checkbox-grid-test.js | 5 - .../integration/components/chevron-test.js | 8 +- .../components/clients/attribution-test.js | 71 +- .../components/clients/config-test.js | 200 +- .../clients/horizontal-bar-chart-test.js | 5 - .../components/clients/line-chart-test.js | 71 +- .../components/clients/monthly-usage-test.js | 19 +- .../components/clients/running-total-test.js | 19 +- .../components/clients/usage-stats-test.js | 5 - .../clients/vertical-bar-chart-test.js | 5 - .../components/confirm-action-test.js | 5 - .../integration/components/confirm-test.js | 5 - .../components/confirmation-modal-test.js | 5 - .../components/console/log-command-test.js | 9 +- .../components/console/log-error-test.js | 5 - .../components/console/log-json-test.js | 5 - .../components/console/log-list-test.js | 5 - .../components/console/log-object-test.js | 5 - .../components/console/log-text-test.js | 5 - .../components/console/ui-panel-test.js | 5 - .../components/control-group-success-test.js | 5 - .../components/control-group-test.js | 5 - .../components/database-role-edit-test.js | 50 - .../database-role-setting-form-test.js | 5 - .../components/date-dropdown-test.js | 196 +- .../components/diff-version-selector-test.js | 5 - .../components/download-button-test.js | 110 - .../components/edit-form-kmip-role-test.js | 61 +- .../integration/components/edit-form-test.js | 5 - .../components/empty-state-test.js | 5 - .../components/features-selection-test.js | 5 - .../integration/components/form-error-test.js | 5 - .../components/form-field-label-test.js | 5 - .../integration/components/form-field-test.js | 21 - .../components/get-credentials-card-test.js | 5 - .../components/hover-copy-button-test.js | 5 - ui/tests/integration/components/icon-test.js | 5 - .../components/identity/item-details-test.js | 5 - .../components/info-table-item-array-test.js | 5 - .../components/info-table-row-test.js | 28 +- .../integration/components/info-table-test.js | 5 - .../components/json-editor-test.js | 5 - .../components/keymgmt/distribute-test.js | 5 - .../components/keymgmt/key-edit-test.js | 19 +- .../components/keymgmt/provider-edit-test.js | 5 - .../components/known-secondaries-card-test.js | 5 - .../known-secondaries-table-test.js | 5 - .../components/kubernetes/config-cta-test.js | 5 - .../kubernetes/page/configuration-test.js | 5 - .../kubernetes/page/configure-test.js | 11 +- .../kubernetes/page/credentials-test.js | 15 +- .../kubernetes/page/overview-test.js | 5 - .../page/role/create-and-edit-test.js | 31 - .../kubernetes/page/role/details-test.js | 7 +- .../components/kubernetes/page/roles-test.js | 5 - .../kubernetes/tab-page-header-test.js | 5 - .../components/kv-object-editor-test.js | 5 - .../components/license-banners-test.js | 40 +- .../components/license-info-test.js | 5 - .../components/link-status-test.js | 13 +- 
.../components/linkable-item-test.js | 83 + .../components/masked-input-test.js | 97 +- .../integration/components/mfa-form-test.js | 11 +- .../mfa-login-enforcement-form-test.js | 5 - .../mfa-login-enforcement-header-test.js | 5 - .../components/mfa-method-list-item-test.js | 5 - .../components/mfa/method-form-test.js | 5 - ui/tests/integration/components/modal-test.js | 5 - .../components/mount-accessor-select-test.js | 5 - .../components/mount-backend-form-test.js | 5 - .../mount-backend/type-form-test.js | 5 - .../integration/components/nav-header-test.js | 34 + .../components/object-list-input-test.js | 5 - .../components/oidc-consent-block-test.js | 5 - .../components/oidc/assignment-form-test.js | 5 - .../components/oidc/client-form-test.js | 5 - .../components/oidc/key-form-test.js | 5 - .../components/oidc/provider-form-test.js | 5 - .../components/oidc/scope-form-test.js | 5 - .../components/okta-number-challenge-test.js | 5 - .../components/overview-card-test.js | 5 - .../components/page/breadcrumbs-test.js | 5 - .../integration/components/page/error-test.js | 5 - .../components/pagination-controls-test.js | 5 - .../path-filter-config-list-test.js | 5 - .../integration/components/pgp-file-test.js | 5 - .../integration/components/pgp-list-test.js | 5 - .../components/pki-configure-form-test.js | 31 + .../{pki => }/pki-generate-root-test.js | 9 +- .../{pki => }/pki-role-generate-test.js | 5 - .../pki-sign-intermediate-form-test.js | 7 +- .../components/pki/config-pki-ca-test.js | 80 + .../components/pki/config-pki-test.js | 149 + .../pki/page/pki-certificate-details-test.js | 12 +- .../page/pki-configuration-details-test.js | 255 - .../pki/page/pki-configuration-edit-test.js | 404 - .../pki/page/pki-configure-create-test.js | 61 - .../pki/page/pki-issuer-edit-test.js | 5 - .../pki-issuer-generate-intermediate-test.js | 80 - .../pki/page/pki-issuer-generate-root-test.js | 77 - .../pki/page/pki-issuer-import-test.js | 67 - .../pki/page/pki-issuer-list-test.js | 75 - .../pki/page/pki-issuer-rotate-root-test.js | 257 - .../pki/page/pki-key-details-test.js | 5 - .../components/pki/page/pki-key-list-test.js | 5 - .../components/pki/page/pki-overview-test.js | 5 - .../pki/page/pki-role-details-test.js | 13 +- .../pki/page/pki-tidy-auto-settings-test.js | 67 - .../pki/page/pki-tidy-status-test.js | 114 - .../pki/parsed-certificate-info-rows-test.js | 43 - .../components/pki/pki-generate-csr-test.js | 92 +- .../pki/pki-generate-toggle-groups-test.js | 9 +- .../pki/pki-import-pem-bundle-test.js | 195 - .../pki/pki-issuer-cross-sign-test.js | 62 +- .../components/pki/pki-issuer-details-test.js | 120 +- .../components/pki/pki-issuer-import-test.js | 95 + .../components/pki/pki-key-form-test.js | 5 - .../components/pki/pki-key-parameters-test.js | 5 - .../components/pki/pki-key-usage-test.js | 5 - .../pki/pki-not-valid-after-form-test.js | 5 - .../components/pki/pki-role-form-test.js | 66 +- .../components/pki/pki-tidy-form-test.js | 315 - .../components/policy-form-test.js | 5 - .../components/radial-progress-test.js | 5 - .../components/radio-button-test.js | 5 - .../integration/components/raft-join-test.js | 5 - .../components/raft-storage-overview-test.js | 5 - .../components/raft-storage-restore-test.js | 5 - .../integration/components/read-more-test.js | 5 - .../components/readonly-form-field-test.js | 5 - .../components/regex-validator-test.js | 5 - .../replication-action-generate-token-test.js | 5 - .../components/replication-actions-test.js | 5 - 
.../components/replication-dashboard-test.js | 5 - .../components/replication-header-test.js | 5 - .../components/replication-page-test.js | 5 - .../replication-primary-card-test.js | 5 - .../replication-secondary-card-test.js | 5 - .../replication-summary-card-test.js | 5 - .../components/replication-table-rows-test.js | 5 - .../components/search-select-test.js | 60 +- .../search-select-with-modal-test.js | 5 - .../components/secret-edit-test.js | 5 - .../components/secret-list-header-test.js | 95 +- .../integration/components/select-test.js | 5 - .../components/selectable-card-test.js | 5 - .../components/shamir-flow-test.js | 5 - .../components/shamir-modal-flow-test.js | 5 - .../components/sidebar/frame-test.js | 68 - .../components/sidebar/nav/access-test.js | 66 - .../components/sidebar/nav/cluster-test.js | 67 - .../components/sidebar/nav/policies-test.js | 45 - .../components/sidebar/nav/tools-test.js | 44 - .../components/sidebar/user-menu-test.js | 64 - .../integration/components/stat-text-test.js | 5 - .../components/string-list-test.js | 19 +- .../integration/components/text-file-test.js | 5 - .../components/toggle-button-test.js | 5 - .../integration/components/toggle-test.js | 5 - .../components/token-expire-warning-test.js | 5 - .../integration/components/tool-tip-test.js | 5 - .../components/toolbar-actions-test.js | 5 - .../components/toolbar-filters-test.js | 5 - .../components/toolbar-link-test.js | 5 - .../integration/components/toolbar-test.js | 5 - .../transform-advanced-templating-test.js | 5 - .../components/transform-edit-base-test.js | 5 - .../components/transform-list-item-test.js | 5 - .../components/transform-role-edit-test.js | 5 - .../components/transit-edit-test.js | 105 - .../components/transit-key-actions-test.js | 5 - .../integration/components/ttl-picker-test.js | 5 - .../components/upgrade-page-test.js | 5 - .../integration/components/wrap-ttl-test.js | 5 - .../integration/helpers/add-to-array-test.js | 5 - .../helpers/changelog-url-for-test.js | 5 - .../integration/helpers/date-format-test.js | 29 +- .../integration/helpers/date-from-now-test.js | 5 - .../helpers/format-duration-test.js | 56 +- .../integration/helpers/has-feature-test.js | 5 - .../helpers/has-permission-test.js | 35 - .../helpers/is-empty-value-test.js | 5 - .../helpers/remove-from-array-test.js | 5 - ui/tests/integration/services/auth-test.js | 57 +- .../utils/client-count-utils-test.js | 5 - .../integration/utils/date-formatters-test.js | 22 +- .../integration/utils/field-to-attrs-test.js | 5 - .../integration/utils/parse-pki-cert-test.js | 17 +- ui/tests/pages/access/identity/aliases/add.js | 5 - .../pages/access/identity/aliases/index.js | 5 - .../pages/access/identity/aliases/show.js | 5 - ui/tests/pages/access/identity/create.js | 5 - ui/tests/pages/access/identity/index.js | 5 - ui/tests/pages/access/identity/show.js | 5 - ui/tests/pages/access/methods.js | 11 +- ui/tests/pages/access/namespaces/index.js | 5 - ui/tests/pages/auth.js | 29 +- ui/tests/pages/components/alert-banner.js | 5 - .../components/auth-config-form/options.js | 5 - ui/tests/pages/components/auth-form.js | 5 - ui/tests/pages/components/auth-jwt.js | 5 - ui/tests/pages/components/calendar-widget.js | 9 +- ui/tests/pages/components/console/ui-panel.js | 5 - .../pages/components/control-group-success.js | 5 - ui/tests/pages/components/control-group.js | 5 - ui/tests/pages/components/edit-form.js | 5 - ui/tests/pages/components/flash-message.js | 5 - ui/tests/pages/components/form-field.js | 5 - 
.../pages/components/hover-copy-button.js | 5 - .../pages/components/identity/edit-form.js | 5 - .../pages/components/identity/item-details.js | 5 - ui/tests/pages/components/info-table-row.js | 5 - ui/tests/pages/components/json-editor.js | 5 - ui/tests/pages/components/kv-object-editor.js | 5 - ui/tests/pages/components/license-info.js | 5 - ui/tests/pages/components/list-view.js | 5 - ui/tests/pages/components/masked-input.js | 6 - .../pages/components/mount-backend-form.js | 5 - ui/tests/pages/components/nav-header.js | 8 + .../pages/components/pki/config-pki-ca.js | 49 + ui/tests/pages/components/pki/config-pki.js | 15 + ui/tests/pages/components/radial-progress.js | 5 - ui/tests/pages/components/search-select.js | 5 - .../components/wizard/features-selection.js | 5 - ui/tests/pages/init.js | 5 - ui/tests/pages/logout.js | 5 - ui/tests/pages/policies/create.js | 5 - ui/tests/pages/policies/index.js | 5 - ui/tests/pages/policy/edit.js | 5 - ui/tests/pages/policy/show.js | 5 - .../pages/secrets/backend/configuration.js | 9 +- ui/tests/pages/secrets/backend/create.js | 5 - ui/tests/pages/secrets/backend/credentials.js | 5 - .../secrets/backend/database/connection.js | 5 - .../pages/secrets/backend/database/role.js | 5 - ui/tests/pages/secrets/backend/edit.js | 5 - .../pages/secrets/backend/kmip/credentials.js | 7 +- ui/tests/pages/secrets/backend/kmip/roles.js | 5 - ui/tests/pages/secrets/backend/kmip/scopes.js | 5 - .../pages/secrets/backend/kv/edit-secret.js | 5 - ui/tests/pages/secrets/backend/kv/show.js | 5 - ui/tests/pages/secrets/backend/list.js | 5 - .../pages/secrets/backend/pki/edit-role.js | 5 - .../secrets/backend/pki/generate-cert.js | 5 - ui/tests/pages/secrets/backend/pki/show.js | 5 - ui/tests/pages/secrets/backend/show.js | 5 - .../pages/secrets/backend/ssh/edit-role.js | 5 - .../pages/secrets/backend/ssh/generate-otp.js | 5 - ui/tests/pages/secrets/backend/ssh/show.js | 5 - .../secrets/backend/transform/alphabets.js | 5 - .../pages/secrets/backend/transform/roles.js | 5 - .../secrets/backend/transform/templates.js | 5 - .../backend/transform/transformations.js | 5 - ui/tests/pages/secrets/backends.js | 7 +- .../pages/settings/auth/configure/index.js | 5 - .../pages/settings/auth/configure/section.js | 5 - ui/tests/pages/settings/auth/enable.js | 5 - .../configure-secret-backends/pki/index.js | 5 + .../pki/section-cert.js | 10 + .../configure-secret-backends/pki/section.js | 14 + .../pages/settings/mount-secret-backend.js | 7 +- ui/tests/test-helper.js | 5 - ui/tests/unit/adapters/aws-credential-test.js | 5 - ui/tests/unit/adapters/capabilities-test.js | 5 - .../unit/adapters/clients-activity-test.js | 41 +- ui/tests/unit/adapters/cluster-test.js | 5 - ui/tests/unit/adapters/console-test.js | 5 - .../unit/adapters/identity/_test-cases.js | 5 - .../adapters/identity/entity-alias-test.js | 5 - .../adapters/identity/entity-merge-test.js | 5 - .../unit/adapters/identity/entity-test.js | 5 - .../adapters/identity/group-alias-test.js | 5 - ui/tests/unit/adapters/identity/group-test.js | 5 - ui/tests/unit/adapters/kmip/role-test.js | 5 - .../unit/adapters/kubernetes/config-test.js | 5 - .../unit/adapters/kubernetes/role-test.js | 5 - .../unit/adapters/oidc/assignment-test.js | 5 - ui/tests/unit/adapters/oidc/client-test.js | 5 - ui/tests/unit/adapters/oidc/key-test.js | 5 - ui/tests/unit/adapters/oidc/provider-test.js | 5 - ui/tests/unit/adapters/oidc/scope-test.js | 5 - ui/tests/unit/adapters/oidc/test-helper.js | 5 - ui/tests/unit/adapters/pki/action-test.js | 68 +- 
.../adapters/pki/certificate/base-test.js | 5 - .../adapters/pki/certificate/generate-test.js | 5 - .../adapters/pki/certificate/sign-test.js | 5 - ui/tests/unit/adapters/pki/config-test.js | 68 - ui/tests/unit/adapters/pki/key-test.js | 5 - ui/tests/unit/adapters/pki/role-test.js | 5 - .../adapters/pki/sign-intermediate-test.js | 5 - ui/tests/unit/adapters/pki/tidy-test.js | 80 - ui/tests/unit/adapters/pki/urls-test.js | 48 + ui/tests/unit/adapters/secret-engine-test.js | 5 - ui/tests/unit/adapters/secret-test.js | 5 - ui/tests/unit/adapters/secret-v2-test.js | 5 - .../unit/adapters/secret-v2-version-test.js | 5 - ui/tests/unit/adapters/tools-test.js | 5 - ui/tests/unit/adapters/transit-key-test.js | 5 - ui/tests/unit/components/auth-form-test.js | 5 - ui/tests/unit/components/auth-jwt-test.js | 5 - .../components/identity/edit-form-test.js | 5 - .../model-expanded-attributes-test.js | 100 - .../unit/decorators/model-form-fields-test.js | 5 - .../unit/decorators/model-validations-test.js | 27 +- ui/tests/unit/helpers/await-test.js | 5 - ui/tests/unit/helpers/filter-wildcard-test.js | 5 - .../unit/helpers/is-wildcard-string-test.js | 5 - ui/tests/unit/lib/attach-capabilities-test.js | 5 - ui/tests/unit/lib/console-helpers-test.js | 124 +- ui/tests/unit/lib/kv-object-test.js | 5 - ui/tests/unit/lib/local-storage-test.js | 5 - ui/tests/unit/lib/path-to-tree-test.js | 5 - ui/tests/unit/machines/auth-machine-test.js | 5 - .../unit/machines/policies-machine-test.js | 5 - .../unit/machines/replication-machine-test.js | 5 - .../unit/machines/secrets-machine-test.js | 5 - ui/tests/unit/machines/tools-machine-test.js | 5 - .../unit/machines/tutorial-machine-test.js | 5 - ui/tests/unit/mixins/cluster-route-test.js | 5 - ui/tests/unit/models/capabilities-test.js | 5 - ui/tests/unit/models/role-jwt-test.js | 5 - ui/tests/unit/models/secret-engine-test.js | 480 +- .../unit/models/secret-v2-version-test.js | 5 - ui/tests/unit/models/transit-key-test.js | 5 - .../vault/cluster/oidc-callback-test.js | 254 +- .../routes/vault/cluster/redirect-test.js | 5 - ui/tests/unit/serializers/cluster-test.js | 5 - .../serializers/mfa-login-enforcement-test.js | 5 - ui/tests/unit/serializers/pki/action-test.js | 5 - ui/tests/unit/serializers/policy-test.js | 5 - ui/tests/unit/serializers/transit-key-test.js | 5 - ui/tests/unit/services/auth-test.js | 5 - ui/tests/unit/services/console-test.js | 63 +- ui/tests/unit/services/control-group-test.js | 11 +- ui/tests/unit/services/feature-flag-test.js | 5 - ui/tests/unit/services/path-helper-test.js | 5 - ui/tests/unit/services/permissions-test.js | 144 +- ui/tests/unit/services/store-test.js | 5 - ui/tests/unit/services/version-test.js | 35 +- ui/tests/unit/services/wizard-test.js | 5 - ui/tests/unit/utils/api-path-test.js | 5 - ui/tests/unit/utils/chart-helpers-test.js | 5 - ui/tests/unit/utils/common-prefix-test.js | 5 - .../unit/utils/decode-config-from-jwt-test.js | 5 - ui/tests/unit/utils/openapi-to-attrs-test.js | 68 - ui/tests/unit/utils/timestamp-test.js | 16 - ui/tests/unit/utils/trim-right-test.js | 5 - ui/tests/unit/utils/validators-test.js | 5 - .../services/flash-messages.d.ts | 5 - .../ember-data/types/registries/adapter.d.ts | 11 - .../ember-data/types/registries/model.d.ts | 5 - ui/types/forms.ts | 5 - ui/types/global.d.ts | 10 - ui/types/vault/adapters/pki/issuer.d.ts | 12 - ui/types/vault/adapters/pki/role.d.ts | 5 - ui/types/vault/adapters/pki/tidy.d.ts | 12 - ui/types/vault/app-types.ts | 39 +- ui/types/vault/index.d.ts | 5 - 
ui/types/vault/models/capabilities.d.ts | 5 - ui/types/vault/models/pki/action.d.ts | 42 +- .../vault/models/pki/certificate/base.d.ts | 19 +- .../models/pki/certificate/generate.d.ts | 13 +- .../pki/certificate/sign-intermediate.d.ts | 22 - .../vault/models/pki/certificate/sign.d.ts | 16 +- ui/types/vault/models/pki/config/crl.d.ts | 16 - ui/types/vault/models/pki/config/urls.d.ts | 11 - ui/types/vault/models/pki/issuer.d.ts | 27 +- ui/types/vault/models/pki/key.d.ts | 26 - ui/types/vault/models/pki/role.d.ts | 13 - ui/types/vault/models/pki/tidy.d.ts | 29 - .../vault/utils/camelize-object-keys.d.ts | 6 - ui/types/vault/utils/field-to-attrs.d.ts | 5 - ui/types/vault/utils/parse-pki-cert.d.ts | 36 - ui/vendor/string-includes.js | 5 - ui/yarn.lock | 44338 ++-- vault/acl.go | 3 - vault/acl_test.go | 3 - vault/acl_util.go | 3 - vault/acme_billing_system_view.go | 143 - vault/activity/activity_log.pb.go | 93 +- vault/activity/activity_log.proto | 6 - vault/activity/generation/generate_data.pb.go | 72 +- vault/activity/generation/generate_data.proto | 11 +- vault/activity/query.go | 3 - vault/activity/query_test.go | 3 - vault/activity_log.go | 800 +- vault/activity_log_test.go | 639 +- vault/activity_log_testing_util.go | 9 +- vault/activity_log_util.go | 11 +- vault/activity_log_util_common.go | 5 +- vault/activity_log_util_common_test.go | 8 - vault/audit.go | 7 +- vault/audit_broker.go | 7 +- vault/audit_test.go | 33 +- vault/audited_headers.go | 3 - vault/audited_headers_test.go | 3 - vault/auth.go | 7 +- vault/auth_test.go | 3 - vault/barrier.go | 3 - vault/barrier_access.go | 3 - vault/barrier_aes_gcm.go | 3 - vault/barrier_aes_gcm_test.go | 3 - vault/barrier_test.go | 3 - vault/barrier_view.go | 3 - vault/barrier_view_test.go | 3 - vault/barrier_view_util.go | 3 - vault/capabilities.go | 3 - vault/capabilities_test.go | 3 - vault/census.go | 16 - vault/cluster.go | 3 - vault/cluster/cluster.go | 3 - vault/cluster/inmem_layer.go | 3 - vault/cluster/inmem_layer_test.go | 3 - vault/cluster/simulations.go | 3 - vault/cluster/tcp_layer.go | 3 - vault/cluster_test.go | 3 - vault/core.go | 54 +- vault/core_metrics.go | 47 - vault/core_metrics_test.go | 94 - vault/core_test.go | 3 - vault/core_util.go | 3 - vault/core_util_common.go | 3 - vault/cors.go | 3 - vault/counters.go | 3 - vault/counters_test.go | 16 +- vault/custom_response_headers.go | 3 - vault/custom_response_headers_test.go | 29 +- vault/diagnose/constants.go | 3 - vault/diagnose/file_checks.go | 3 - vault/diagnose/file_checks_test.go | 3 - vault/diagnose/file_checks_unix.go | 3 - vault/diagnose/file_checks_windows.go | 3 - vault/diagnose/helpers.go | 3 - vault/diagnose/helpers_test.go | 3 - vault/diagnose/mock_storage_backend.go | 3 - vault/diagnose/os_common.go | 3 - vault/diagnose/os_openbsd_arm.go | 3 - vault/diagnose/os_unix.go | 3 - vault/diagnose/os_windows.go | 3 - vault/diagnose/output.go | 3 - vault/diagnose/raft_checks.go | 3 - vault/diagnose/storage_checks.go | 3 - vault/diagnose/storage_checks_test.go | 3 - vault/diagnose/tls_verification.go | 3 - vault/diagnose/tls_verification_test.go | 3 - vault/dynamic_system_view.go | 3 - vault/dynamic_system_view_test.go | 3 - vault/eventbus/bus.go | 3 - vault/eventbus/bus_test.go | 3 - vault/events_test.go | 3 - vault/expiration.go | 3 - vault/expiration_integ_test.go | 24 +- vault/expiration_test.go | 3 - vault/expiration_testing_util_common.go | 3 - vault/expiration_util.go | 3 - vault/external_plugin_test.go | 3 - .../api/api_integration_test.go | 3 - 
.../api/feature_flag_ext_test.go | 7 +- vault/external_tests/api/kv_helpers_test.go | 3 - .../api/renewer_integration_test.go | 3 - vault/external_tests/api/secret_test.go | 3 - vault/external_tests/api/sudo_paths_test.go | 3 - .../external_tests/api/sys_rekey_ext_test.go | 3 - .../approle/wrapped_secretid_test.go | 65 +- .../expiration/expiration_test.go | 32 +- .../external_tests/hcp_link/hcp_link_test.go | 4 +- vault/external_tests/hcp_link/test_helpers.go | 3 - vault/external_tests/identity/aliases_test.go | 118 +- .../external_tests/identity/entities_test.go | 39 +- .../identity/group_aliases_test.go | 24 +- vault/external_tests/identity/groups_test.go | 38 +- .../external_tests/identity/identity_test.go | 86 +- .../identity/login_mfa_duo_test.go | 28 +- .../identity/login_mfa_okta_test.go | 37 +- .../identity/login_mfa_totp_test.go | 3 - .../identity/oidc_provider_test.go | 3 - .../identity/userlockouts_test.go | 3 - vault/external_tests/kv/kv_patch_test.go | 67 +- vault/external_tests/kv/kv_subkeys_test.go | 84 +- vault/external_tests/kv/kvv2_upgrade_test.go | 16 +- .../metrics/core_metrics_int_test.go | 3 - vault/external_tests/mfa/login_mfa_test.go | 71 +- .../misc/misc_binary/recovery_test.go | 175 - .../misc/recover_from_panic_test.go | 3 - vault/external_tests/misc/recovery_test.go | 3 - .../plugin/external_plugin_test.go | 910 +- vault/external_tests/plugin/plugin_test.go | 163 +- .../policy/acl_templating_test.go | 22 +- vault/external_tests/policy/policy_test.go | 28 +- vault/external_tests/pprof/pprof.go | 140 - .../pprof/pprof_binary/pprof_test.go | 58 - vault/external_tests/pprof/pprof_test.go | 131 +- vault/external_tests/quotas/quotas_test.go | 10 +- vault/external_tests/raft/raft.go | 46 - .../raft/raft_autopilot_test.go | 23 +- .../raft/raft_binary/raft_test.go | 46 - vault/external_tests/raft/raft_test.go | 47 +- vault/external_tests/raftha/raft_ha_test.go | 3 - .../replication_binary/replication_test.go | 35 - .../response/allowed_response_headers_test.go | 3 - .../external_tests/router/router_ext_test.go | 25 +- .../seal_migration_pre14_test.go | 3 - .../sealmigration/seal_migration_test.go | 3 - .../sealmigration/testshared.go | 3 - .../seal_migration_pre14_test.go | 3 - .../external_tests/token/batch_token_test.go | 44 +- vault/external_tests/token/token_test.go | 68 +- vault/forwarded_writer_oss.go | 3 - vault/generate_root.go | 3 - vault/generate_root_recovery.go | 3 - vault/generate_root_test.go | 3 - vault/ha.go | 15 +- vault/ha_test.go | 3 - .../api_capability/api_capability.go | 3 - .../api_capability/api_passthrough.go | 3 - .../api_capability/token_manager.go | 3 - .../api_capability/token_manager_test.go | 3 - vault/hcp_link/capabilities/capability.go | 3 - .../capabilities/link_control/link_control.go | 3 - vault/hcp_link/capabilities/meta/meta.go | 3 - .../capabilities/node_status/node_status.go | 7 +- vault/hcp_link/internal/config.go | 3 - vault/hcp_link/internal/wrapped_hcpLink.go | 3 - vault/hcp_link/link.go | 3 - .../proto/link_control/link_control.pb.go | 3 - .../proto/link_control/link_control.proto | 3 - vault/hcp_link/proto/meta/meta.pb.go | 3 - vault/hcp_link/proto/meta/meta.proto | 3 - vault/hcp_link/proto/node_status/status.pb.go | 3 - vault/hcp_link/proto/node_status/status.proto | 3 - vault/identity_lookup.go | 15 - vault/identity_lookup_test.go | 3 - vault/identity_store.go | 146 +- vault/identity_store_aliases.go | 47 +- vault/identity_store_aliases_test.go | 3 - vault/identity_store_entities.go | 100 +- 
vault/identity_store_entities_test.go | 3 - vault/identity_store_group_aliases.go | 47 +- vault/identity_store_group_aliases_test.go | 3 - vault/identity_store_groups.go | 87 +- vault/identity_store_groups_test.go | 3 - vault/identity_store_oidc.go | 70 +- vault/identity_store_oidc_provider.go | 72 +- vault/identity_store_oidc_provider_test.go | 3 - vault/identity_store_oidc_provider_util.go | 5 +- vault/identity_store_oidc_test.go | 7 +- vault/identity_store_oidc_util.go | 3 - vault/identity_store_oss.go | 3 - vault/identity_store_schema.go | 3 - vault/identity_store_structs.go | 3 - vault/identity_store_test.go | 3 - vault/identity_store_upgrade.go | 87 +- vault/identity_store_util.go | 3 - vault/init.go | 10 +- vault/init_test.go | 3 - vault/inspectable.go | 3 - vault/inspectable_test.go | 3 - vault/keyring.go | 3 - vault/keyring_test.go | 3 - vault/logical_cubbyhole.go | 30 +- vault/logical_cubbyhole_test.go | 3 - vault/logical_passthrough.go | 3 - vault/logical_passthrough_test.go | 3 - vault/logical_raw.go | 76 +- vault/logical_system.go | 17 +- vault/logical_system_activity.go | 92 +- .../logical_system_activity_write_testonly.go | 395 +- ...cal_system_activity_write_testonly_test.go | 523 - vault/logical_system_helpers.go | 188 +- vault/logical_system_integ_test.go | 25 +- vault/logical_system_paths.go | 2648 +- vault/logical_system_pprof.go | 137 +- vault/logical_system_quotas.go | 115 - vault/logical_system_raft.go | 3 - vault/logical_system_test.go | 491 +- vault/logical_system_user_lockout.go | 3 - vault/logical_system_util.go | 3 - vault/login_mfa.go | 16 +- vault/login_mfa_test.go | 3 - vault/managed_key_registry.go | 3 - vault/mfa_auth_resp_priority_queue.go | 3 - vault/mfa_auth_resp_priority_queue_test.go | 3 - vault/mount.go | 26 +- vault/mount_test.go | 3 - vault/mount_util.go | 12 +- vault/mount_util_shared.go | 3 - vault/namespaces.go | 3 - vault/namespaces_oss.go | 3 - vault/password_policy_util.go | 3 - vault/plugin_catalog.go | 3 - vault/plugin_catalog_test.go | 3 - vault/plugin_reload.go | 5 +- vault/policy.go | 3 - vault/policy_store.go | 57 - vault/policy_store_test.go | 37 - vault/policy_store_util.go | 3 - vault/policy_test.go | 3 - vault/policy_util.go | 3 - vault/quotas/quotas.go | 3 - vault/quotas/quotas_rate_limit.go | 3 - vault/quotas/quotas_rate_limit_test.go | 3 - vault/quotas/quotas_test.go | 3 - vault/quotas/quotas_util.go | 3 - vault/raft.go | 5 +- vault/rekey.go | 15 +- vault/rekey_test.go | 3 - vault/replication/cluster.go | 3 - vault/request_forwarding.go | 3 - vault/request_forwarding_rpc.go | 3 - vault/request_forwarding_rpc_util.go | 3 - vault/request_forwarding_service.pb.go | 3 - vault/request_forwarding_service.proto | 3 - vault/request_handling.go | 28 +- vault/request_handling_test.go | 3 - vault/request_handling_util.go | 3 - vault/rollback.go | 3 - vault/rollback_test.go | 3 - vault/router.go | 20 +- vault/router_access.go | 3 - vault/router_test.go | 3 - vault/router_testing.go | 3 - vault/seal.go | 84 +- vault/seal/envelope.go | 3 - vault/seal/envelope_test.go | 3 - vault/seal/seal.go | 64 +- vault/seal/seal_testing.go | 31 +- vault/seal_access.go | 6 +- vault/seal_autoseal.go | 65 +- vault/seal_autoseal_test.go | 7 +- vault/seal_test.go | 3 - vault/seal_testing.go | 3 - vault/seal_testing_util.go | 14 +- vault/seal_util.go | 162 - vault/sealunwrapper.go | 99 +- vault/sealunwrapper_test.go | 3 - vault/test_cluster_detect_deadlock.go | 3 - vault/test_cluster_do_not_detect_deadlock.go | 3 - vault/testing.go | 94 +- vault/testing_util.go | 
3 - vault/token_store.go | 130 +- vault/token_store_test.go | 17 - vault/token_store_util.go | 3 - vault/token_store_util_common.go | 3 - vault/tokens/token.pb.go | 3 - vault/tokens/token.proto | 3 - vault/ui.go | 3 - vault/ui_test.go | 3 - vault/util.go | 3 - vault/util_test.go | 3 - vault/vault_version_time.go | 3 - vault/version_store.go | 3 - vault/version_store_test.go | 3 - vault/wrapping.go | 21 +- vault/wrapping_util.go | 3 - version/cgo.go | 3 - version/version.go | 3 - version/version_base.go | 7 +- website/.eslintrc.js | 5 - website/.stylelintrc.js | 5 - website/Dockerfile | 3 - website/README.md | 10 - website/content/api-docs/auth/alicloud.mdx | 2 +- website/content/api-docs/auth/aws.mdx | 2 +- website/content/api-docs/auth/azure.mdx | 2 +- website/content/api-docs/auth/cf.mdx | 2 +- website/content/api-docs/auth/gcp.mdx | 2 +- website/content/api-docs/auth/jwt.mdx | 8 +- website/content/api-docs/auth/kubernetes.mdx | 2 +- website/content/api-docs/auth/ldap.mdx | 3 - website/content/api-docs/auth/oci.mdx | 2 +- website/content/api-docs/index.mdx | 56 +- website/content/api-docs/secret/aws.mdx | 137 +- .../api-docs/secret/databases/index.mdx | 4 +- .../secret/databases/mongodbatlas.mdx | 4 +- .../api-docs/secret/databases/postgresql.mdx | 8 - .../content/api-docs/secret/kubernetes.mdx | 12 +- website/content/api-docs/secret/kv/kv-v2.mdx | 136 +- .../content/api-docs/secret/mongodbatlas.mdx | 4 - website/content/api-docs/secret/pki.mdx | 446 +- website/content/api-docs/secret/transit.mdx | 97 +- website/content/api-docs/system/audit.mdx | 4 - .../api-docs/system/internal-counters.mdx | 8 +- .../system/storage/raftautosnapshots.mdx | 3 +- .../agent/generate-config/index.mdx | 112 - .../agent/process-supervisor.mdx | 151 - .../autoauth/methods/index.mdx | 12 - .../agent-and-proxy/autoauth/methods/jwt.mdx | 30 - .../agent-and-proxy/autoauth/methods/oci.mdx | 43 - .../agent-and-proxy/autoauth/sinks/index.mdx | 11 - .../content/docs/agent-and-proxy/index.mdx | 109 - .../docs/agent-and-proxy/proxy/apiproxy.mdx | 87 - .../agent-and-proxy/proxy/caching/index.mdx | 242 - .../proxy/caching/persistent-caches/index.mdx | 42 - .../caching/persistent-caches/kubernetes.mdx | 22 - .../docs/agent-and-proxy/proxy/index.mdx | 354 - .../docs/agent-and-proxy/proxy/versions.mdx | 39 - .../{agent-and-proxy => }/agent/apiproxy.mdx | 13 +- .../autoauth/index.mdx | 66 +- .../autoauth/methods/alicloud.mdx | 6 +- .../autoauth/methods/approle.mdx | 10 +- .../autoauth/methods/aws.mdx | 8 +- .../autoauth/methods/azure.mdx | 6 +- .../autoauth/methods/cert.mdx | 13 +- .../autoauth/methods/cf.mdx | 4 +- .../autoauth/methods/gcp.mdx | 6 +- .../docs/agent/autoauth/methods/index.mdx | 9 + .../docs/agent/autoauth/methods/jwt.mdx | 20 + .../autoauth/methods/kerberos.mdx | 10 +- .../autoauth/methods/kubernetes.mdx | 6 +- .../autoauth/methods/token_file.mdx | 20 +- .../autoauth/sinks/file.mdx | 10 +- .../docs/agent/autoauth/sinks/index.mdx | 9 + .../agent/caching/index.mdx | 10 +- .../agent/caching/persistent-caches/index.mdx | 1 + .../caching/persistent-caches/kubernetes.mdx | 0 .../{agent-and-proxy => }/agent/index.mdx | 118 +- .../{agent-and-proxy => }/agent/template.mdx | 56 +- .../{agent-and-proxy => }/agent/versions.mdx | 0 .../{agent-and-proxy => }/agent/winsvc.mdx | 0 website/content/docs/audit/index.mdx | 4 - .../docs/auth/jwt/oidc-providers/ibmisam.mdx | 34 - .../docs/auth/jwt/oidc-providers/index.mdx | 1 - website/content/docs/auth/ldap.mdx | 4 - .../content/docs/commands/audit/disable.mdx | 4 - 
website/content/docs/commands/audit/index.mdx | 4 - website/content/docs/commands/kv/list.mdx | 8 +- .../docs/commands/pki/health-check.mdx | 37 +- .../docs/concepts/client-count/index.mdx | 38 +- website/content/docs/concepts/policies.mdx | 93 +- website/content/docs/deprecation/index.mdx | 2 +- .../content/docs/enterprise/consistency.mdx | 16 +- .../license/utilization-reporting.mdx | 281 - website/content/docs/partnerships.mdx | 10 +- .../docs/platform/aws/lambda-extension.mdx | 6 +- .../docs/platform/k8s/helm/configuration.mdx | 4 +- .../k8s/helm/examples/kubernetes-auth.mdx | 24 +- .../docs/platform/k8s/injector-csi.mdx | 4 +- .../platform/k8s/injector/annotations.mdx | 22 +- .../docs/platform/k8s/injector/examples.mdx | 2 +- .../docs/platform/k8s/injector/index.mdx | 4 +- .../docs/platform/servicenow/installation.mdx | 4 +- website/content/docs/release-notes/1.14.0.mdx | 29 - website/content/docs/secrets/aws.mdx | 14 +- website/content/docs/secrets/azure.mdx | 14 - website/content/docs/secrets/consul.mdx | 2 + .../content/docs/secrets/databases/index.mdx | 30 +- .../docs/secrets/databases/mongodbatlas.mdx | 87 +- .../docs/secrets/databases/postgresql.mdx | 3 +- website/content/docs/secrets/kubernetes.mdx | 39 - website/content/docs/secrets/kv/kv-v1.mdx | 2 +- .../docs/secrets/pki/considerations.mdx | 4 +- website/content/docs/upgrading/plugins.mdx | 7 +- .../docs/upgrading/upgrade-to-1.14.x.mdx | 25 - website/content/docs/use-cases.mdx | 2 +- .../partials/db-secrets-credential-types.mdx | 15 +- .../ui-pki-control-groups-known-issue.mdx | 12 - website/data/docs-nav-data.json | 196 +- website/package-lock.json | 2219 +- website/package.json | 6 +- website/prettier.config.js | 5 - website/public/ie-custom-properties.js | 5 - website/public/img/vault-agent-workflow.png | Bin 121516 -> 0 bytes website/redirects.js | 50 - website/scripts/should-build.sh | 3 - website/scripts/website-build.sh | 3 - website/scripts/website-start.sh | 3 - 4214 files changed, 360264 insertions(+), 112238 deletions(-) delete mode 100644 .github/workflows/drepecated-functions-checker.yml delete mode 100644 .github/workflows/nil-nil-function-checker.yml create mode 100644 .github/workflows/test-link-rewrites.yml delete mode 100644 api/output_policy_test.go delete mode 100644 api/replication_status.go delete mode 100644 builtin/logical/aws/path_static_creds.go delete mode 100644 builtin/logical/aws/path_static_creds_test.go delete mode 100644 builtin/logical/aws/path_static_roles.go delete mode 100644 builtin/logical/aws/path_static_roles_test.go delete mode 100644 builtin/logical/aws/rotation.go delete mode 100644 builtin/logical/aws/rotation_test.go delete mode 100644 builtin/logical/pki/acme_authorizations.go delete mode 100644 builtin/logical/pki/acme_billing.go delete mode 100644 builtin/logical/pki/acme_billing_test.go delete mode 100644 builtin/logical/pki/acme_challenge_engine.go delete mode 100644 builtin/logical/pki/acme_challenges.go delete mode 100644 builtin/logical/pki/acme_challenges_test.go delete mode 100644 builtin/logical/pki/acme_eab_policy.go delete mode 100644 builtin/logical/pki/acme_errors.go delete mode 100644 builtin/logical/pki/acme_jws.go delete mode 100644 builtin/logical/pki/acme_state.go delete mode 100644 builtin/logical/pki/acme_state_test.go delete mode 100644 builtin/logical/pki/acme_wrappers.go delete mode 100644 builtin/logical/pki/acme_wrappers_test.go delete mode 100644 builtin/logical/pki/dnstest/server.go delete mode 100644 builtin/logical/pki/path_acme_account.go delete 
mode 100644 builtin/logical/pki/path_acme_authorizations.go delete mode 100644 builtin/logical/pki/path_acme_challenges.go delete mode 100644 builtin/logical/pki/path_acme_directory.go delete mode 100644 builtin/logical/pki/path_acme_eab.go delete mode 100644 builtin/logical/pki/path_acme_nonce.go delete mode 100644 builtin/logical/pki/path_acme_order.go delete mode 100644 builtin/logical/pki/path_acme_order_test.go delete mode 100644 builtin/logical/pki/path_acme_revoke.go delete mode 100644 builtin/logical/pki/path_acme_test.go delete mode 100644 builtin/logical/pki/path_config_acme.go delete mode 100644 builtin/logical/pki/path_config_acme_test.go delete mode 100644 builtin/logical/pkiext/pkiext_binary/acme_test.go delete mode 100644 builtin/logical/pkiext/pkiext_binary/pki_cluster.go delete mode 100644 builtin/logical/pkiext/pkiext_binary/pki_mount.go delete mode 100644 builtin/logical/transit/path_byok.go delete mode 100644 builtin/logical/transit/path_byok_test.go delete mode 100644 changelog/17919.txt delete mode 100644 changelog/17934.txt delete mode 100644 changelog/18186.txt delete mode 100644 changelog/18225.txt delete mode 100644 changelog/18230.txt delete mode 100644 changelog/18376.txt delete mode 100644 changelog/18465.txt delete mode 100644 changelog/18468.txt delete mode 100644 changelog/18472.txt delete mode 100644 changelog/18515.txt delete mode 100644 changelog/18521.txt delete mode 100644 changelog/18542.txt delete mode 100644 changelog/18624.txt delete mode 100644 changelog/18625.txt delete mode 100644 changelog/18626.txt delete mode 100644 changelog/18627.txt delete mode 100644 changelog/18628.txt delete mode 100644 changelog/18633.txt delete mode 100644 changelog/18729.txt delete mode 100644 changelog/18863.txt delete mode 100644 changelog/18870.txt delete mode 100644 changelog/19002.txt delete mode 100644 changelog/19043.txt delete mode 100644 changelog/19103.txt delete mode 100644 changelog/19135.txt delete mode 100644 changelog/19139.txt delete mode 100644 changelog/19160.txt delete mode 100644 changelog/19170.txt delete mode 100644 changelog/19187.txt delete mode 100644 changelog/19215.txt create mode 100644 changelog/19230.txt delete mode 100644 changelog/19247.txt delete mode 100644 changelog/19252.txt delete mode 100644 changelog/19260.txt delete mode 100644 changelog/19296.txt delete mode 100644 changelog/19319.txt delete mode 100644 changelog/19365.txt delete mode 100644 changelog/19378.txt delete mode 100644 changelog/19416.txt delete mode 100644 changelog/19468.txt delete mode 100644 changelog/19495.txt delete mode 100644 changelog/19519.txt delete mode 100644 changelog/19520.txt delete mode 100644 changelog/19616.txt delete mode 100644 changelog/19776.txt delete mode 100644 changelog/19791.txt delete mode 100644 changelog/19798.txt delete mode 100644 changelog/19814.txt delete mode 100644 changelog/19829.txt delete mode 100644 changelog/19846.txt delete mode 100644 changelog/19861.txt delete mode 100644 changelog/19862.txt delete mode 100644 changelog/19878.txt delete mode 100644 changelog/19891.txt delete mode 100644 changelog/19901.txt delete mode 100644 changelog/19913.txt delete mode 100644 changelog/19954.txt delete mode 100644 changelog/19993.txt delete mode 100644 changelog/20073.txt delete mode 100644 changelog/20078.txt delete mode 100644 changelog/20086.txt delete mode 100644 changelog/20125.txt delete mode 100644 changelog/20150.txt delete mode 100644 changelog/20163.txt delete mode 100644 changelog/20224.txt delete mode 100644 
changelog/20234.txt delete mode 100644 changelog/20247.txt delete mode 100644 changelog/20253.txt delete mode 100644 changelog/20261.txt delete mode 100644 changelog/20265.txt delete mode 100644 changelog/20276.txt delete mode 100644 changelog/20285.txt delete mode 100644 changelog/20425.txt delete mode 100644 changelog/20430.txt delete mode 100644 changelog/20431.txt delete mode 100644 changelog/20441.txt delete mode 100644 changelog/20442.txt delete mode 100644 changelog/20464.txt delete mode 100644 changelog/20481.txt delete mode 100644 changelog/20488.txt delete mode 100644 changelog/20530.txt delete mode 100644 changelog/20536.txt delete mode 100644 changelog/20548.txt delete mode 100644 changelog/20559.txt delete mode 100644 changelog/20569.txt delete mode 100644 changelog/20590.txt delete mode 100644 changelog/20626.txt delete mode 100644 changelog/20628.txt delete mode 100644 changelog/20629.txt delete mode 100644 changelog/20652.txt delete mode 100644 changelog/20654.txt delete mode 100644 changelog/20664.txt delete mode 100644 changelog/20680.txt delete mode 100644 changelog/20694.txt delete mode 100644 changelog/20697.txt delete mode 100644 changelog/20725.txt delete mode 100644 changelog/20731.txt delete mode 100644 changelog/20736.txt delete mode 100644 changelog/20741.txt delete mode 100644 changelog/20742.txt delete mode 100644 changelog/20745.txt delete mode 100644 changelog/20747.txt delete mode 100644 changelog/20750.txt delete mode 100644 changelog/20751.txt delete mode 100644 changelog/20752.txt delete mode 100644 changelog/20758.txt delete mode 100644 changelog/20763.txt delete mode 100644 changelog/20764.txt delete mode 100644 changelog/20767.txt delete mode 100644 changelog/20771.txt delete mode 100644 changelog/20777.txt delete mode 100644 changelog/20784.txt delete mode 100644 changelog/20787.txt delete mode 100644 changelog/20799.txt delete mode 100644 changelog/20802.txt delete mode 100644 changelog/20807.txt delete mode 100644 changelog/20816.txt delete mode 100644 changelog/20818.txt delete mode 100644 changelog/20834.txt delete mode 100644 changelog/20882.txt delete mode 100644 changelog/20891.txt delete mode 100644 changelog/20933.txt delete mode 100644 changelog/20934.txt delete mode 100644 changelog/20943.txt delete mode 100644 changelog/20981.txt delete mode 100644 changelog/20995.txt delete mode 100644 changelog/21010.txt create mode 100644 changelog/_go-ver-1132.txt rename changelog/{_go-ver-1140.txt => _go-ver-1133.txt} (100%) delete mode 100644 changelog/pki-ui-improvements.txt rename command/{agentproxyshared => agent}/auth/alicloud/alicloud.go (98%) rename command/{agentproxyshared => agent}/auth/approle/approle.go (98%) rename command/{agentproxyshared => agent}/auth/auth.go (79%) rename command/{agentproxyshared => agent}/auth/auth_test.go (95%) rename command/{agentproxyshared => agent}/auth/aws/aws.go (98%) rename command/{agentproxyshared => agent}/auth/azure/azure.go (97%) rename command/{agentproxyshared => agent}/auth/cert/cert.go (89%) rename command/{agentproxyshared => agent}/auth/cert/cert_test.go (67%) rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/keys/cert.pem (100%) rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/keys/key.pem (100%) rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/keys/pkioutput (100%) rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/root/pkioutput (100%) rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/root/root.crl (100%) 
rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/root/rootcacert.pem (100%) rename command/{agentproxyshared => agent}/auth/cert/test-fixtures/root/rootcakey.pem (100%) rename command/{agentproxyshared => agent}/auth/cf/cf.go (94%) rename command/{agentproxyshared => agent}/auth/gcp/gcp.go (97%) rename command/{agentproxyshared => agent}/auth/jwt/jwt.go (61%) rename command/{agentproxyshared => agent}/auth/jwt/jwt_test.go (62%) rename command/{agentproxyshared => agent}/auth/kerberos/integtest/integrationtest.sh (98%) rename command/{agentproxyshared => agent}/auth/kerberos/kerberos.go (95%) rename command/{agentproxyshared => agent}/auth/kerberos/kerberos_test.go (94%) rename command/{agentproxyshared => agent}/auth/kubernetes/kubernetes.go (95%) rename command/{agentproxyshared => agent}/auth/kubernetes/kubernetes_test.go (97%) rename command/{agentproxyshared => agent}/auth/token-file/token_file.go (93%) rename command/{agentproxyshared => agent}/auth/token-file/token_file_test.go (94%) rename command/{agentproxyshared => agent}/cache/api_proxy.go (74%) rename command/{agentproxyshared => agent}/cache/api_proxy_test.go (89%) rename command/{agentproxyshared => agent}/cache/cache_test.go (99%) rename command/{agentproxyshared => agent}/cache/cacheboltdb/bolt.go (99%) rename command/{agentproxyshared => agent}/cache/cacheboltdb/bolt_test.go (98%) rename command/{agentproxyshared => agent}/cache/cachememdb/cache_memdb.go (98%) rename command/{agentproxyshared => agent}/cache/cachememdb/cache_memdb_test.go (99%) rename command/{agentproxyshared => agent}/cache/cachememdb/index.go (98%) rename command/{agentproxyshared => agent}/cache/cachememdb/index_test.go (94%) rename command/{agentproxyshared => agent}/cache/handler.go (97%) rename command/{agentproxyshared => agent}/cache/keymanager/manager.go (88%) rename command/{agentproxyshared => agent}/cache/keymanager/passthrough.go (96%) rename command/{agentproxyshared => agent}/cache/keymanager/passthrough_test.go (93%) rename command/{agentproxyshared => agent}/cache/lease_cache.go (97%) rename command/{agentproxyshared => agent}/cache/lease_cache_test.go (97%) rename command/{agentproxyshared => agent}/cache/listener.go (96%) rename command/{agentproxyshared => agent}/cache/proxy.go (96%) rename command/{agentproxyshared => agent}/cache/testing.go (89%) delete mode 100644 command/agent/config/test-fixtures/bad-config-env-templates-disalowed-fields.hcl delete mode 100644 command/agent/config/test-fixtures/bad-config-env-templates-invalid-signal.hcl delete mode 100644 command/agent/config/test-fixtures/bad-config-env-templates-missing-exec.hcl delete mode 100644 command/agent/config/test-fixtures/bad-config-env-templates-no-name.hcl delete mode 100644 command/agent/config/test-fixtures/bad-config-env-templates-with-file-templates.hcl delete mode 100644 command/agent/config/test-fixtures/bad-config-env-templates-with-proxy.hcl delete mode 100644 command/agent/config/test-fixtures/config-env-templates-complex.hcl delete mode 100644 command/agent/config/test-fixtures/config-env-templates-simple.hcl delete mode 100644 command/agent/config/test-fixtures/config-env-templates-with-source.hcl delete mode 100644 command/agent/config/test-fixtures/config-template-with-cache.hcl delete mode 100644 command/agent/exec/exec.go delete mode 100644 command/agent/exec/exec_test.go delete mode 100644 command/agent/exec/test-app/main.go delete mode 100644 command/agent/internal/ctmanager/runner_config.go delete mode 100644 
command/agent/oci_end_to_end_test.go rename command/{agentproxyshared => agent}/sink/file/file_sink.go (96%) rename command/{agentproxyshared => agent}/sink/file/file_sink_test.go (95%) rename command/{agentproxyshared => agent}/sink/file/sink_test.go (95%) rename command/{agentproxyshared => agent}/sink/inmem/inmem_sink.go (81%) rename command/{agentproxyshared => agent}/sink/mock/mock_sink.go (68%) rename command/{agentproxyshared => agent}/sink/sink.go (98%) rename command/{agentproxyshared => agent}/winsvc/service.go (76%) rename command/{agentproxyshared => agent}/winsvc/service_windows.go (93%) delete mode 100644 command/agent_generate_config.go delete mode 100644 command/agent_generate_config_test.go delete mode 100644 command/agentproxyshared/auth/oci/oci.go delete mode 100644 command/agentproxyshared/helpers.go delete mode 100644 command/agentproxyshared/helpers_test.go delete mode 100644 command/healthcheck/pki_allow_acme_headers.go delete mode 100644 command/healthcheck/pki_enable_acme_issuance.go delete mode 100644 command/kv_helpers_test.go delete mode 100644 command/proxy.go delete mode 100644 command/proxy/config/config.go delete mode 100644 command/proxy/config/config_test.go delete mode 100644 command/proxy/config/test-fixtures/config-cache-embedded-type.hcl delete mode 100644 command/proxy/config/test-fixtures/config-cache.hcl delete mode 100644 command/proxy/test-fixtures/reload/reload_bar.key delete mode 100644 command/proxy/test-fixtures/reload/reload_bar.pem delete mode 100644 command/proxy/test-fixtures/reload/reload_ca.pem delete mode 100644 command/proxy/test-fixtures/reload/reload_foo.key delete mode 100644 command/proxy/test-fixtures/reload/reload_foo.pem delete mode 100644 command/proxy_test.go rename {sdk/helper => helper/testhelpers}/docker/testhelpers.go (65%) delete mode 100644 helper/testhelpers/minimal/minimal.go delete mode 100644 plugins/database/postgresql/passwordauthentication.go delete mode 100644 plugins/database/postgresql/scram/LICENSE delete mode 100644 plugins/database/postgresql/scram/scram.go delete mode 100644 plugins/database/postgresql/scram/scram_test.go delete mode 100755 scripts/deprecations-checker.sh delete mode 100644 sdk/helper/consts/proxy.go delete mode 100644 sdk/helper/testcluster/consts.go delete mode 100644 sdk/helper/testcluster/docker/cert.go delete mode 100644 sdk/helper/testcluster/docker/environment.go delete mode 100644 sdk/helper/testcluster/docker/replication.go delete mode 100644 sdk/helper/testcluster/exec.go delete mode 100644 sdk/helper/testcluster/logging.go delete mode 100644 sdk/helper/testcluster/replication.go delete mode 100644 sdk/helper/testcluster/types.go delete mode 100644 sdk/helper/testcluster/util.go delete mode 100644 sdk/helper/testhelpers/output.go delete mode 100644 sdk/helper/testhelpers/output_test.go delete mode 100644 sdk/logical/acme_billing.go delete mode 100644 tools/gonilnilfunctions/main.go delete mode 100644 tools/gonilnilfunctions/pkg/analyzer/analyzer.go delete mode 100644 tools/gonilnilfunctions/pkg/analyzer/analyzer_test.go delete mode 100644 tools/gonilnilfunctions/pkg/analyzer/testdata/funcs.go create mode 100644 ui/.github/workflows/ci.yml create mode 100644 ui/.nvmrc create mode 100755 ui/.yarn/releases/yarn-1.19.1.js create mode 100755 ui/.yarn/releases/yarn-1.22.19.js delete mode 100755 ui/.yarn/releases/yarn-3.5.0.cjs delete mode 100644 ui/.yarnrc.yml create mode 100644 ui/app/adapters/pki-ca-certificate-sign.js create mode 100644 ui/app/adapters/pki-ca-certificate.js create 
mode 100644 ui/app/adapters/pki-certificate-sign.js create mode 100644 ui/app/adapters/pki.js delete mode 100644 ui/app/adapters/pki/config/acme.js delete mode 100644 ui/app/adapters/pki/config/base.js delete mode 100644 ui/app/adapters/pki/config/cluster.js delete mode 100644 ui/app/adapters/pki/config/crl.js delete mode 100644 ui/app/adapters/pki/config/urls.js create mode 100644 ui/app/adapters/pki/pki-config.js create mode 100644 ui/app/adapters/pki/pki-role.js delete mode 100644 ui/app/adapters/pki/tidy.js create mode 100644 ui/app/adapters/pki/urls.js create mode 100644 ui/app/components/alert-popup.js rename ui/app/components/{sidebar/user-menu.js => auth-info.js} (68%) create mode 100644 ui/app/components/cluster-info.js create mode 100644 ui/app/components/console/log-help.js create mode 100644 ui/app/components/home-link.js create mode 100644 ui/app/components/menu-sidebar.js create mode 100644 ui/app/components/nav-header.js create mode 100644 ui/app/components/nav-header/home.js create mode 100644 ui/app/components/nav-header/items.js create mode 100644 ui/app/components/nav-header/main.js create mode 100644 ui/app/components/pki/config-pki-ca.js create mode 100644 ui/app/components/pki/config-pki.js create mode 100644 ui/app/components/pki/pki-cert-popup.js create mode 100644 ui/app/components/pki/pki-cert-show.js create mode 100644 ui/app/components/pki/role-pki-edit.js delete mode 100644 ui/app/components/sidebar/frame.hbs delete mode 100644 ui/app/components/sidebar/frame.js delete mode 100644 ui/app/components/sidebar/nav/access.hbs delete mode 100644 ui/app/components/sidebar/nav/cluster.hbs delete mode 100644 ui/app/components/sidebar/nav/cluster.js delete mode 100644 ui/app/components/sidebar/nav/policies.hbs delete mode 100644 ui/app/components/sidebar/nav/tools.hbs delete mode 100644 ui/app/components/sidebar/user-menu.hbs create mode 100644 ui/app/components/status-menu.js delete mode 100644 ui/app/decorators/model-expanded-attributes.js rename ui/app/lib/{console-helpers.ts => console-helpers.js} (50%) create mode 100644 ui/app/models/pki-ca-certificate-sign.js create mode 100644 ui/app/models/pki-ca-certificate.js create mode 100644 ui/app/models/pki-certificate-sign.js create mode 100644 ui/app/models/pki/about-pki-naming.md create mode 100644 ui/app/models/pki/cert.js delete mode 100644 ui/app/models/pki/config/acme.js delete mode 100644 ui/app/models/pki/config/cluster.js delete mode 100644 ui/app/models/pki/config/crl.js create mode 100644 ui/app/models/pki/pki-config.js create mode 100644 ui/app/models/pki/pki-role.js delete mode 100644 ui/app/models/pki/tidy.js rename ui/app/models/pki/{config => }/urls.js (77%) delete mode 100644 ui/app/routes/vault/cluster/clients/index.js create mode 100644 ui/app/routes/vault/cluster/settings/configure-secret-backend/index.js create mode 100644 ui/app/routes/vault/cluster/settings/configure-secret-backend/section.js create mode 100644 ui/app/serializers/pki/cert.js create mode 100644 ui/app/serializers/pki/pki-config.js create mode 100644 ui/app/serializers/pki/pki-role.js delete mode 100644 ui/app/serializers/pki/tidy.js delete mode 100644 ui/app/styles/components/autocomplete-input.scss rename ui/app/styles/components/{empty-state-component.scss => empty-state.scss} (94%) create mode 100644 ui/app/styles/components/info-table.scss create mode 100644 ui/app/styles/components/input-hint.scss create mode 100644 ui/app/styles/components/linkable-item.scss rename ui/app/styles/components/{modal-component.scss => modal.scss} 
(60%) create mode 100644 ui/app/styles/components/replication-header.scss create mode 100644 ui/app/styles/components/status-menu.scss delete mode 100644 ui/app/styles/components/tabs-component.scss create mode 100644 ui/app/styles/components/tabs.scss create mode 100644 ui/app/styles/core/bulma-radio-checkboxes.scss delete mode 100644 ui/app/styles/core/checkbox-and-radio.scss delete mode 100644 ui/app/styles/core/columns.scss delete mode 100644 ui/app/styles/core/control.scss delete mode 100644 ui/app/styles/core/element-styling.scss delete mode 100644 ui/app/styles/core/field.scss delete mode 100644 ui/app/styles/core/file.scss create mode 100644 ui/app/styles/core/forms.scss create mode 100644 ui/app/styles/core/generic.scss create mode 100644 ui/app/styles/core/helpers.scss create mode 100644 ui/app/styles/core/hero.scss delete mode 100644 ui/app/styles/core/inputs.scss delete mode 100644 ui/app/styles/core/label.scss rename ui/app/styles/core/{containers.scss => layout.scss} (53%) delete mode 100644 ui/app/styles/core/link.scss create mode 100644 ui/app/styles/core/navbar.scss create mode 100644 ui/app/styles/core/notification.scss create mode 100644 ui/app/styles/core/tables.scss delete mode 100644 ui/app/styles/core/tag.scss create mode 100644 ui/app/styles/core/tags.scss create mode 100644 ui/app/styles/engines.scss delete mode 100644 ui/app/styles/helper-classes/colors.scss delete mode 100644 ui/app/styles/helper-classes/flexbox-and-grid.scss delete mode 100644 ui/app/styles/helper-classes/general.scss delete mode 100644 ui/app/styles/helper-classes/layout.scss delete mode 100644 ui/app/styles/helper-classes/spacing.scss delete mode 100644 ui/app/styles/helper-classes/typography.scss create mode 100644 ui/app/styles/pki/pki-not-valid-after-form.scss delete mode 100644 ui/app/styles/reset.scss delete mode 100644 ui/app/styles/utils/_box-shadow_variables.scss create mode 100644 ui/app/styles/utils/_bulma_variables.scss rename ui/app/styles/utils/{_color_variables.scss => _colors.scss} (81%) delete mode 100644 ui/app/styles/utils/_font_variables.scss delete mode 100644 ui/app/styles/utils/_size_variables.scss create mode 100644 ui/app/styles/utils/_spacing.scss rename ui/{lib/core/addon => app/templates}/components/alert-popup.hbs (53%) create mode 100644 ui/app/templates/components/auth-info.hbs create mode 100644 ui/app/templates/components/cluster-info.hbs create mode 100644 ui/app/templates/components/home-link.hbs create mode 100644 ui/app/templates/components/menu-sidebar.hbs create mode 100644 ui/app/templates/components/nav-header.hbs create mode 100644 ui/app/templates/components/pki/config-pki-ca.hbs create mode 100644 ui/app/templates/components/pki/config-pki.hbs create mode 100644 ui/app/templates/components/pki/configure-pki-secret.hbs create mode 100644 ui/app/templates/components/pki/pki-cert-popup.hbs create mode 100644 ui/app/templates/components/pki/pki-cert-show.hbs create mode 100644 ui/app/templates/components/pki/role-pki-edit.hbs create mode 100644 ui/app/templates/components/secret-list/pki-cert-item.hbs create mode 100644 ui/app/templates/components/secret-list/pki-role-item.hbs create mode 100644 ui/app/templates/components/status-menu.hbs delete mode 100644 ui/app/templates/vault/cluster/access/loading.hbs delete mode 100644 ui/app/templates/vault/cluster/clients/loading.hbs delete mode 100644 ui/app/templates/vault/cluster/settings/auth.hbs create mode 100644 ui/app/templates/vault/cluster/settings/configure-secret-backend/section.hbs delete mode 100644 
ui/app/utils/camelize-object-keys.js create mode 100644 ui/blueprints/component/files/__root__/__path__/__name__.js create mode 100644 ui/blueprints/component/files/__root__/__templatepath__/__templatename__.hbs create mode 100644 ui/blueprints/component/index.js delete mode 100644 ui/docs/components.md delete mode 100644 ui/docs/css.md delete mode 100644 ui/docs/ember-engines.md delete mode 100644 ui/docs/forms.md delete mode 100644 ui/docs/model-validations.md delete mode 100644 ui/docs/models.md delete mode 100644 ui/docs/routing.md delete mode 100644 ui/docs/serializers-adapters.md create mode 100644 ui/lib/core/addon/components/empty-state.js create mode 100644 ui/lib/core/addon/components/field-group-show.js create mode 100644 ui/lib/core/addon/components/form-error.js delete mode 100644 ui/lib/core/addon/components/icon.hbs delete mode 100644 ui/lib/core/addon/components/info-tooltip.hbs create mode 100644 ui/lib/core/addon/components/layout-loading.js create mode 100644 ui/lib/core/addon/components/linkable-item.js create mode 100644 ui/lib/core/addon/components/linkable-item/content.js create mode 100644 ui/lib/core/addon/components/linkable-item/menu.js delete mode 100644 ui/lib/core/addon/components/masked-input.hbs delete mode 100644 ui/lib/core/addon/components/menu-loader.hbs create mode 100644 ui/lib/core/addon/components/menu-loader.js delete mode 100644 ui/lib/core/addon/components/navigate-input.hbs create mode 100644 ui/lib/core/addon/components/page-header-level-left.js create mode 100644 ui/lib/core/addon/components/page-header-level-right.js delete mode 100644 ui/lib/core/addon/components/page-header-level.hbs create mode 100644 ui/lib/core/addon/components/page-header-top.js delete mode 100644 ui/lib/core/addon/components/page-header.hbs delete mode 100644 ui/lib/core/addon/components/popup-menu.hbs create mode 100644 ui/lib/core/addon/components/popup-menu.js delete mode 100644 ui/lib/core/addon/components/search-select-placeholder.hbs create mode 100644 ui/lib/core/addon/components/search-select-placeholder.js create mode 100644 ui/lib/core/addon/components/toolbar-actions.js create mode 100644 ui/lib/core/addon/components/toolbar-filters.js create mode 100644 ui/lib/core/addon/components/toolbar.js create mode 100644 ui/lib/core/addon/components/vault-logo-spinner.js rename ui/lib/core/addon/{ => templates}/components/box-radio.hbs (100%) rename ui/lib/core/addon/{ => templates}/components/chevron.hbs (100%) rename ui/lib/core/addon/{ => templates}/components/empty-state.hbs (53%) rename ui/lib/core/addon/{ => templates}/components/field-group-show.hbs (86%) rename ui/lib/core/addon/{ => templates}/components/form-error.hbs (100%) create mode 100644 ui/lib/core/addon/templates/components/icon.hbs rename ui/lib/core/addon/{ => templates}/components/info-table.hbs (86%) create mode 100644 ui/lib/core/addon/templates/components/info-tooltip.hbs rename ui/lib/core/addon/{ => templates}/components/layout-loading.hbs (100%) create mode 100644 ui/lib/core/addon/templates/components/linkable-item.hbs create mode 100644 ui/lib/core/addon/templates/components/linkable-item/content.hbs create mode 100644 ui/lib/core/addon/templates/components/linkable-item/menu.hbs create mode 100644 ui/lib/core/addon/templates/components/masked-input.hbs create mode 100644 ui/lib/core/addon/templates/components/menu-loader.hbs rename ui/lib/core/addon/{ => templates}/components/namespace-reminder.hbs (92%) create mode 100644 ui/lib/core/addon/templates/components/navigate-input.hbs create 
mode 100644 ui/lib/core/addon/templates/components/page-header.hbs create mode 100644 ui/lib/core/addon/templates/components/popup-menu.hbs create mode 100644 ui/lib/core/addon/templates/components/search-select-placeholder.hbs rename ui/lib/core/addon/{ => templates}/components/toolbar-actions.hbs (100%) rename ui/lib/core/addon/{ => templates}/components/toolbar-filters.hbs (100%) rename ui/lib/core/addon/{ => templates}/components/toolbar.hbs (100%) rename ui/lib/core/addon/{ => templates}/components/upgrade-page.hbs (100%) rename ui/lib/core/addon/{ => templates}/components/vault-logo-spinner.hbs (100%) delete mode 100644 ui/lib/core/addon/utils/timestamp.js delete mode 100644 ui/lib/core/app/components/alert-popup.js create mode 100644 ui/lib/core/app/components/linkable-item.js create mode 100644 ui/lib/core/app/components/linkable-item/content.js create mode 100644 ui/lib/core/app/components/linkable-item/menu.js create mode 100644 ui/lib/core/app/components/page-header-level-left.js create mode 100644 ui/lib/core/app/components/page-header-level-right.js delete mode 100644 ui/lib/core/app/components/page-header-level.js create mode 100644 ui/lib/core/app/components/page-header-top.js delete mode 100644 ui/lib/core/app/utils/timestamp.js delete mode 100644 ui/lib/pki/addon/components/page/pki-configuration-details.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-configuration-details.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-configuration-edit.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-configuration-edit.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-configure-create.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-details.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-generate-intermediate.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-generate-intermediate.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-generate-root.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-generate-root.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-import.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-import.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-list.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-rotate-root.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-issuer-rotate-root.ts delete mode 100644 ui/lib/pki/addon/components/page/pki-tidy-auto-configure.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-tidy-auto-settings.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-tidy-manual.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-tidy-status.hbs delete mode 100644 ui/lib/pki/addon/components/page/pki-tidy-status.ts delete mode 100644 ui/lib/pki/addon/components/parsed-certificate-info-rows.hbs delete mode 100644 ui/lib/pki/addon/components/parsed-certificate-info-rows.ts create mode 100644 ui/lib/pki/addon/components/pki-ca-certificate-import.hbs create mode 100644 ui/lib/pki/addon/components/pki-ca-certificate-import.ts create mode 100644 ui/lib/pki/addon/components/pki-configure-form.hbs rename ui/lib/pki/addon/components/{page/pki-configure-create.ts => pki-configure-form.ts} (65%) create mode 100644 ui/lib/pki/addon/components/pki-generate-root.js delete mode 100644 ui/lib/pki/addon/components/pki-generate-root.ts delete mode 100644 ui/lib/pki/addon/components/pki-import-pem-bundle.hbs delete mode 100644 
ui/lib/pki/addon/components/pki-import-pem-bundle.ts delete mode 100644 ui/lib/pki/addon/components/pki-info-table-rows.hbs rename ui/lib/pki/addon/components/{pki-key-form.ts => pki-key-form.js} (71%) rename ui/lib/pki/addon/components/{pki-key-import.ts => pki-key-import.js} (71%) create mode 100644 ui/lib/pki/addon/components/pki-key-parameters.js delete mode 100644 ui/lib/pki/addon/components/pki-key-parameters.ts create mode 100644 ui/lib/pki/addon/components/pki-role-form.js delete mode 100644 ui/lib/pki/addon/components/pki-role-form.ts delete mode 100644 ui/lib/pki/addon/components/pki-tidy-form.hbs delete mode 100644 ui/lib/pki/addon/components/pki-tidy-form.ts delete mode 100644 ui/lib/pki/addon/controllers/tidy/index.js delete mode 100644 ui/lib/pki/addon/decorators/check-config.js create mode 100644 ui/lib/pki/addon/routes/configuration/tidy.js rename ui/lib/pki/addon/routes/issuers/{issuer.js => issuer/index.js} (66%) delete mode 100644 ui/lib/pki/addon/routes/issuers/issuer/rotate-root.js delete mode 100644 ui/lib/pki/addon/routes/tidy.js delete mode 100644 ui/lib/pki/addon/routes/tidy/auto.js delete mode 100644 ui/lib/pki/addon/routes/tidy/auto/configure.js delete mode 100644 ui/lib/pki/addon/routes/tidy/auto/index.js delete mode 100644 ui/lib/pki/addon/routes/tidy/index.js delete mode 100644 ui/lib/pki/addon/routes/tidy/manual.js create mode 100644 ui/lib/pki/addon/templates/configuration/tidy.hbs delete mode 100644 ui/lib/pki/addon/templates/issuers/issuer/rotate-root.hbs delete mode 100644 ui/lib/pki/addon/templates/tidy.hbs delete mode 100644 ui/lib/pki/addon/templates/tidy/auto.hbs delete mode 100644 ui/lib/pki/addon/templates/tidy/auto/configure.hbs delete mode 100644 ui/lib/pki/addon/templates/tidy/auto/index.hbs delete mode 100644 ui/lib/pki/addon/templates/tidy/index.hbs delete mode 100644 ui/lib/pki/addon/templates/tidy/manual.hbs delete mode 100644 ui/mirage/helpers/modify-passthrough-response.js delete mode 100644 ui/public/images/pki-rotate-root.png delete mode 100644 ui/public/images/pki-tidy.png delete mode 100644 ui/tests/acceptance/enterprise-sidebar-nav-test.js rename ui/tests/acceptance/{pki/pki-engine-route-cleanup-test.js => pki-engine-route-cleanup-test.js-test.js} (91%) delete mode 100644 ui/tests/acceptance/pki/pki-action-forms-test.js delete mode 100644 ui/tests/acceptance/pki/pki-configuration-test.js delete mode 100644 ui/tests/acceptance/pki/pki-cross-sign-test.js delete mode 100644 ui/tests/acceptance/pki/pki-tidy-test.js create mode 100644 ui/tests/acceptance/secrets/backend/pki/cert-test.js create mode 100644 ui/tests/acceptance/secrets/backend/pki/list-test.js create mode 100644 ui/tests/acceptance/secrets/backend/pki/role-test.js create mode 100644 ui/tests/acceptance/settings/configure-secret-backends/pki/index-test.js create mode 100644 ui/tests/acceptance/settings/configure-secret-backends/pki/section-cert-test.js create mode 100644 ui/tests/acceptance/settings/configure-secret-backends/pki/section-crl-test.js create mode 100644 ui/tests/acceptance/settings/configure-secret-backends/pki/section-tidy-test.js create mode 100644 ui/tests/acceptance/settings/configure-secret-backends/pki/section-urls-test.js delete mode 100644 ui/tests/acceptance/sidebar-nav-test.js delete mode 100644 ui/tests/helpers/components/sidebar-nav.js delete mode 100644 ui/tests/helpers/pki/page/pki-configuration-edit.js delete mode 100644 ui/tests/helpers/pki/page/pki-tidy-form.js delete mode 100644 ui/tests/helpers/pki/page/pki-tidy.js delete mode 100644 
ui/tests/helpers/pki/pki-configure-create.js create mode 100644 ui/tests/helpers/pki/pki-configure-form.js delete mode 100644 ui/tests/helpers/pki/pki-delete-all-issuers.js delete mode 100644 ui/tests/helpers/pki/pki-issuer-cross-sign.js delete mode 100644 ui/tests/integration/components/alert-popup-test.js delete mode 100644 ui/tests/integration/components/database-role-edit-test.js delete mode 100644 ui/tests/integration/components/download-button-test.js create mode 100644 ui/tests/integration/components/linkable-item-test.js create mode 100644 ui/tests/integration/components/nav-header-test.js create mode 100644 ui/tests/integration/components/pki-configure-form-test.js rename ui/tests/integration/components/{pki => }/pki-generate-root-test.js (97%) rename ui/tests/integration/components/{pki => }/pki-role-generate-test.js (97%) rename ui/tests/integration/components/{pki => }/pki-sign-intermediate-form-test.js (96%) create mode 100644 ui/tests/integration/components/pki/config-pki-ca-test.js create mode 100644 ui/tests/integration/components/pki/config-pki-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-configuration-details-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-configuration-edit-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-configure-create-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-issuer-generate-intermediate-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-issuer-generate-root-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-issuer-import-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-issuer-list-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-issuer-rotate-root-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-tidy-auto-settings-test.js delete mode 100644 ui/tests/integration/components/pki/page/pki-tidy-status-test.js delete mode 100644 ui/tests/integration/components/pki/parsed-certificate-info-rows-test.js delete mode 100644 ui/tests/integration/components/pki/pki-import-pem-bundle-test.js create mode 100644 ui/tests/integration/components/pki/pki-issuer-import-test.js delete mode 100644 ui/tests/integration/components/pki/pki-tidy-form-test.js delete mode 100644 ui/tests/integration/components/sidebar/frame-test.js delete mode 100644 ui/tests/integration/components/sidebar/nav/access-test.js delete mode 100644 ui/tests/integration/components/sidebar/nav/cluster-test.js delete mode 100644 ui/tests/integration/components/sidebar/nav/policies-test.js delete mode 100644 ui/tests/integration/components/sidebar/nav/tools-test.js delete mode 100644 ui/tests/integration/components/sidebar/user-menu-test.js delete mode 100644 ui/tests/integration/components/transit-edit-test.js create mode 100644 ui/tests/pages/components/nav-header.js create mode 100644 ui/tests/pages/components/pki/config-pki-ca.js create mode 100644 ui/tests/pages/components/pki/config-pki.js create mode 100644 ui/tests/pages/settings/configure-secret-backends/pki/index.js create mode 100644 ui/tests/pages/settings/configure-secret-backends/pki/section-cert.js create mode 100644 ui/tests/pages/settings/configure-secret-backends/pki/section.js delete mode 100644 ui/tests/unit/adapters/pki/config-test.js delete mode 100644 ui/tests/unit/adapters/pki/tidy-test.js create mode 100644 ui/tests/unit/adapters/pki/urls-test.js delete mode 100644 
ui/tests/unit/decorators/model-expanded-attributes-test.js delete mode 100644 ui/tests/unit/utils/timestamp-test.js delete mode 100644 ui/types/vault/adapters/pki/issuer.d.ts delete mode 100644 ui/types/vault/adapters/pki/tidy.d.ts delete mode 100644 ui/types/vault/models/pki/certificate/sign-intermediate.d.ts delete mode 100644 ui/types/vault/models/pki/config/crl.d.ts delete mode 100644 ui/types/vault/models/pki/config/urls.d.ts delete mode 100644 ui/types/vault/models/pki/key.d.ts delete mode 100644 ui/types/vault/models/pki/role.d.ts delete mode 100644 ui/types/vault/models/pki/tidy.d.ts delete mode 100644 ui/types/vault/utils/camelize-object-keys.d.ts delete mode 100644 ui/types/vault/utils/parse-pki-cert.d.ts delete mode 100644 vault/acme_billing_system_view.go delete mode 100644 vault/census.go delete mode 100644 vault/external_tests/misc/misc_binary/recovery_test.go delete mode 100644 vault/external_tests/pprof/pprof.go delete mode 100644 vault/external_tests/pprof/pprof_binary/pprof_test.go delete mode 100644 vault/external_tests/raft/raft.go delete mode 100644 vault/external_tests/raft/raft_binary/raft_test.go delete mode 100644 vault/external_tests/replication_binary/replication_test.go delete mode 100644 vault/seal_util.go delete mode 100644 website/content/docs/agent-and-proxy/agent/generate-config/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/agent/process-supervisor.mdx delete mode 100644 website/content/docs/agent-and-proxy/autoauth/methods/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/autoauth/methods/jwt.mdx delete mode 100644 website/content/docs/agent-and-proxy/autoauth/methods/oci.mdx delete mode 100644 website/content/docs/agent-and-proxy/autoauth/sinks/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/proxy/apiproxy.mdx delete mode 100644 website/content/docs/agent-and-proxy/proxy/caching/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/proxy/caching/persistent-caches/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/proxy/caching/persistent-caches/kubernetes.mdx delete mode 100644 website/content/docs/agent-and-proxy/proxy/index.mdx delete mode 100644 website/content/docs/agent-and-proxy/proxy/versions.mdx rename website/content/docs/{agent-and-proxy => }/agent/apiproxy.mdx (79%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/index.mdx (83%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/alicloud.mdx (91%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/approle.mdx (91%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/aws.mdx (93%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/azure.mdx (88%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/cert.mdx (70%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/cf.mdx (80%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/gcp.mdx (89%) create mode 100644 website/content/docs/agent/autoauth/methods/index.mdx create mode 100644 website/content/docs/agent/autoauth/methods/jwt.mdx rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/kerberos.mdx (86%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/kubernetes.mdx (82%) rename website/content/docs/{agent-and-proxy => agent}/autoauth/methods/token_file.mdx (59%) rename 
website/content/docs/{agent-and-proxy => agent}/autoauth/sinks/file.mdx (82%) create mode 100644 website/content/docs/agent/autoauth/sinks/index.mdx rename website/content/docs/{agent-and-proxy => }/agent/caching/index.mdx (95%) rename website/content/docs/{agent-and-proxy => }/agent/caching/persistent-caches/index.mdx (98%) rename website/content/docs/{agent-and-proxy => }/agent/caching/persistent-caches/kubernetes.mdx (100%) rename website/content/docs/{agent-and-proxy => }/agent/index.mdx (81%) rename website/content/docs/{agent-and-proxy => }/agent/template.mdx (86%) rename website/content/docs/{agent-and-proxy => }/agent/versions.mdx (100%) rename website/content/docs/{agent-and-proxy => }/agent/winsvc.mdx (100%) delete mode 100644 website/content/docs/auth/jwt/oidc-providers/ibmisam.mdx delete mode 100644 website/content/docs/enterprise/license/utilization-reporting.mdx delete mode 100644 website/content/docs/release-notes/1.14.0.mdx delete mode 100644 website/content/docs/upgrading/upgrade-to-1.14.x.mdx delete mode 100644 website/content/partials/ui-pki-control-groups-known-issue.mdx delete mode 100644 website/public/img/vault-agent-workflow.png diff --git a/.copywrite.hcl b/.copywrite.hcl index ab9fa58f3411e8..e3f120ea6d977c 100644 --- a/.copywrite.hcl +++ b/.copywrite.hcl @@ -8,9 +8,8 @@ project { # Supports doublestar glob patterns for more flexibility in defining which # files or folders should be ignored header_ignore = [ - "builtin/credential/aws/pkcs7/**", + "builtin/credentials/aws/pkcs7/**", "ui/node_modules/**", "enos/modules/k8s_deploy_vault/raft-config.hcl", - "plugins/database/postgresql/scram/**" ] } diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 23958d8352807b..9399e9b5dba9c4 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - contact_links: - name: Ask a question url: https://discuss.hashicorp.com/c/vault diff --git a/.github/scripts/generate-test-package-lists.sh b/.github/scripts/generate-test-package-lists.sh index 408e1780adb023..493a92c8c87c29 100755 --- a/.github/scripts/generate-test-package-lists.sh +++ b/.github/scripts/generate-test-package-lists.sh @@ -8,8 +8,6 @@ # solution. It distributes the entire set of test packages into 16 sublists, # which should roughly take an equal amount of time to complete. 
-set -e - test_packages=() base="github.com/hashicorp/vault" @@ -27,7 +25,7 @@ fi # Total time: 1009 test_packages[3]+=" $base/builtin/credential/approle" -test_packages[3]+=" $base/command/agentproxyshared/sink/file" +test_packages[3]+=" $base/command/agent/sink/file" test_packages[3]+=" $base/command/agent/template" test_packages[3]+=" $base/helper/random" test_packages[3]+=" $base/helper/storagepacker" @@ -46,7 +44,6 @@ test_packages[4]+=" $base/http" test_packages[4]+=" $base/sdk/helper/pluginutil" test_packages[4]+=" $base/serviceregistration/kubernetes" test_packages[4]+=" $base/tools/godoctests/pkg/analyzer" -test_packages[4]+=" $base/tools/gonilnilfunctions/pkg/analyzer" if [ "${ENTERPRISE:+x}" == "x" ] ; then test_packages[4]+=" $base/vault/external_tests/apilock" test_packages[4]+=" $base/vault/external_tests/filteredpaths" @@ -88,19 +85,16 @@ test_packages[6]+=" $base/builtin/audit/file" test_packages[6]+=" $base/builtin/credential/github" test_packages[6]+=" $base/builtin/credential/okta" test_packages[6]+=" $base/builtin/logical/database/dbplugin" -test_packages[6]+=" $base/command/agentproxyshared/auth/cert" -test_packages[6]+=" $base/command/agentproxyshared/auth/jwt" -test_packages[6]+=" $base/command/agentproxyshared/auth/kerberos" -test_packages[6]+=" $base/command/agentproxyshared/auth/kubernetes" -test_packages[6]+=" $base/command/agentproxyshared/auth/token-file" -test_packages[6]+=" $base/command/agentproxyshared" -test_packages[6]+=" $base/command/agentproxyshared/cache" -test_packages[6]+=" $base/command/agentproxyshared/cache/cacheboltdb" -test_packages[6]+=" $base/command/agentproxyshared/cache/cachememdb" -test_packages[6]+=" $base/command/agentproxyshared/cache/keymanager" +test_packages[6]+=" $base/command/agent/auth/cert" +test_packages[6]+=" $base/command/agent/auth/jwt" +test_packages[6]+=" $base/command/agent/auth/kerberos" +test_packages[6]+=" $base/command/agent/auth/kubernetes" +test_packages[6]+=" $base/command/agent/auth/token-file" +test_packages[6]+=" $base/command/agent/cache" +test_packages[6]+=" $base/command/agent/cache/cacheboltdb" +test_packages[6]+=" $base/command/agent/cache/cachememdb" +test_packages[6]+=" $base/command/agent/cache/keymanager" test_packages[6]+=" $base/command/agent/config" -test_packages[6]+=" $base/command/agent/exec" -test_packages[6]+=" $base/command/proxy/config" test_packages[6]+=" $base/command/config" test_packages[6]+=" $base/command/token" if [ "${ENTERPRISE:+x}" == "x" ] ; then @@ -203,7 +197,7 @@ test_packages[7]+=" $base/vault/quotas" # Total time: 779 test_packages[8]+=" $base/builtin/credential/aws/pkcs7" test_packages[8]+=" $base/builtin/logical/totp" -test_packages[8]+=" $base/command/agentproxyshared/auth" +test_packages[8]+=" $base/command/agent/auth" test_packages[8]+=" $base/physical/raft" test_packages[8]+=" $base/sdk/framework" test_packages[8]+=" $base/sdk/plugin" @@ -252,7 +246,6 @@ test_packages[13]+=" $base/command/server" test_packages[13]+=" $base/physical/aerospike" test_packages[13]+=" $base/physical/cockroachdb" test_packages[13]+=" $base/plugins/database/postgresql" -test_packages[13]+=" $base/plugins/database/postgresql/scram" if [ "${ENTERPRISE:+x}" == "x" ] ; then test_packages[13]+=" $base/vault/external_tests/filteredpathsext" fi @@ -288,9 +281,3 @@ if [ "${ENTERPRISE:+x}" == "x" ] ; then test_packages[16]+=" $base/vault/external_tests/replicationext" test_packages[16]+=" $base/vault/external_tests/sealext" fi - -for i in $(cd $(git rev-parse --show-toplevel) && go list -test -json ./... 
| - jq -r '.ForTest | select(.!=null) | select(.|test("_binary$"))'); -do - test_packages[17]+=" $i" -done diff --git a/.github/scripts/verify_changes.sh b/.github/scripts/verify_changes.sh index 78a41b14b31863..81f3b688eb5bdb 100755 --- a/.github/scripts/verify_changes.sh +++ b/.github/scripts/verify_changes.sh @@ -3,13 +3,15 @@ event_type=$1 # GH event type (pull_request) ref_name=$2 # branch reference that triggered the workflow -base_ref=$3 # PR branch base ref +head_ref=$3 # PR branch head ref +base_ref=$4 # PR branch base ref changed_dir="" if [[ "$event_type" == "pull_request" ]]; then + git fetch --no-tags --prune origin $head_ref git fetch --no-tags --prune origin $base_ref - head_commit="HEAD" + head_commit="origin/$head_ref" base_commit="origin/$base_ref" else git fetch --no-tags --prune origin $ref_name diff --git a/.github/workflows/actionlint.yml b/.github/workflows/actionlint.yml index 817de2d37b9370..abe7e7237b810e 100644 --- a/.github/workflows/actionlint.yml +++ b/.github/workflows/actionlint.yml @@ -10,6 +10,6 @@ jobs: actionlint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: "Check workflow files" uses: docker://docker.mirror.hashicorp.services/rhysd/actionlint@sha256:93834930f56ca380be3e9a3377670d7aa5921be251b9c774891a39b3629b83b8 diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index f78464a8c8b9e6..c2f347e5708195 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -11,7 +11,7 @@ jobs: backport-targeted-release-branch: if: github.event.pull_request.merged runs-on: ubuntu-latest - container: hashicorpdev/backport-assistant:0.3.3 + container: hashicorpdev/backport-assistant:0.3.0 steps: - name: Backport changes to targeted release branch run: | diff --git a/.github/workflows/build-vault-oss.yml b/.github/workflows/build-vault-oss.yml index 60480d53ae23e1..0ba21db599997c 100644 --- a/.github/workflows/build-vault-oss.yml +++ b/.github/workflows/build-vault-oss.yml @@ -40,14 +40,14 @@ jobs: runs-on: ubuntu-latest name: Vault ${{ inputs.goos }} ${{ inputs.goarch }} v${{ inputs.vault-version }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + - uses: actions/checkout@v3 + - uses: actions/setup-go@v3 with: go-version: ${{ inputs.go-version }} - name: Set up node and yarn - uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 + uses: actions/setup-node@v3 with: - node-version-file: './ui/package.json' + node-version: 14 cache: yarn cache-dependency-path: ui/yarn.lock - name: Build UI @@ -68,7 +68,7 @@ jobs: env: BUNDLE_PATH: out/${{ env.ARTIFACT_BASENAME }}.zip run: make ci-bundle - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + - uses: actions/upload-artifact@v3 with: name: ${{ env.ARTIFACT_BASENAME }}.zip path: out/${{ env.ARTIFACT_BASENAME }}.zip @@ -96,13 +96,13 @@ jobs: echo "RPM_PACKAGE=$(basename out/*.rpm)" >> "$GITHUB_ENV" echo "DEB_PACKAGE=$(basename out/*.deb)" >> "$GITHUB_ENV" - if: ${{ inputs.create-packages }} - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + uses: actions/upload-artifact@v3 with: name: ${{ env.RPM_PACKAGE }} path: out/${{ env.RPM_PACKAGE }} if-no-files-found: error - if: ${{ inputs.create-packages }} - uses: 
actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + uses: actions/upload-artifact@v3 with: name: ${{ env.DEB_PACKAGE }} path: out/${{ env.DEB_PACKAGE }} diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ccf4a351948822..cc2da2af06090a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,11 +4,6 @@ name: build on: workflow_dispatch: pull_request: - # The default types for pull_request are [ opened, synchronize, reopened ]. - # This is insufficient for our needs, since we're skipping stuff on PRs in - # draft mode. By adding the ready_for_review type, when a draft pr is marked - # ready, we run everything, including the stuff we'd have skipped up until now. - types: [ opened, synchronize, reopened, ready_for_review ] push: branches: - main @@ -17,22 +12,19 @@ on: jobs: # verify-changes determines if the changes are only for docs (website) verify-changes: - if: github.event.pull_request.draft == false runs-on: ubuntu-latest outputs: is_docs_change: ${{ steps.get-changeddir.outputs.is_docs_change }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 0 # Use fetch depth 0 for comparing changes to base branch + - uses: actions/checkout@v3 - name: Get changed directories id: get-changeddir env: TYPE: ${{ github.event_name }} REF_NAME: ${{ github.ref_name }} + HEAD_REF: ${{ github.head_ref }} BASE: ${{ github.base_ref }} - run: ./.github/scripts/verify_changes.sh ${{ env.TYPE }} ${{ env.REF_NAME }} ${{ env.BASE }} + run: ./.github/scripts/verify_changes.sh ${{ env.TYPE }} ${{ env.REF_NAME }} ${{ env.HEAD_REF }} ${{ env.BASE }} product-metadata: # do not run build and test steps for docs changes @@ -51,7 +43,7 @@ jobs: vault-version: ${{ steps.get-metadata.outputs.vault-version }} vault-base-version: ${{ steps.get-metadata.outputs.vault-base-version }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - name: Get metadata id: get-metadata env: @@ -73,7 +65,7 @@ jobs: with: version: ${{ steps.get-metadata.outputs.vault-version }} product: ${{ steps.get-metadata.outputs.package-name }} - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + - uses: actions/upload-artifact@v3 with: name: metadata.json path: ${{ steps.generate-metadata-file.outputs.filepath }} @@ -152,7 +144,7 @@ jobs: matrix: arch: [arm, arm64, 386, amd64] steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - uses: hashicorp/actions-docker-build@v1 with: version: ${{ needs.product-metadata.outputs.vault-version }} @@ -173,7 +165,7 @@ jobs: matrix: arch: [amd64] steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v2 - uses: hashicorp/actions-docker-build@v1 with: version: ${{ needs.product-metadata.outputs.vault-version }} @@ -184,15 +176,6 @@ jobs: test: name: Test ${{ matrix.build-artifact-name }} - # Only run the Enos workflow against branches that are created from the - # hashicorp/vault repository. This has the effect of limiting execution of - # Enos scenarios to branches that originate from authors that have write - # access to hashicorp/vault repository. This is required as Github Actions - # will not populate the required secrets for branches created by outside - # contributors in order to protect the secrets integrity. 
- # This condition can be removed in future if enos workflow is updated to - # workflow_run event - if: "! github.event.pull_request.head.repo.fork" needs: - product-metadata - build-linux @@ -216,16 +199,6 @@ jobs: test-docker-k8s: name: Test Docker K8s - # Only run the Enos workflow against branches that are created from the - # hashicorp/vault repository. This has the effect of limiting execution of - # Enos scenarios to branches that originate from authors that have write - # access to hashicorp/vault repository. This is required as Github Actions - # will not populate the required secrets for branches created by outside - # contributors in order to protect the secrets integrity. - # GHA secrets are only ready on workflow_run for public repo - # This condition can be removed in future if enos workflow is updated to - # workflow_run event - if: "! github.event.pull_request.head.repo.fork" needs: - product-metadata - build-docker diff --git a/.github/workflows/changelog-checker.yml b/.github/workflows/changelog-checker.yml index 155cc8eff1e7e4..3811a767fb18ac 100644 --- a/.github/workflows/changelog-checker.yml +++ b/.github/workflows/changelog-checker.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v2 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 0 # by default the checkout action doesn't checkout all branches diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3658f62927470b..2f857653503f95 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,11 +1,6 @@ name: CI on: pull_request: - # The default types for pull_request are [ opened, synchronize, reopened ]. - # This is insufficient for our needs, since we're skipping stuff on PRs in - # draft mode. By adding the ready_for_review type, when a draft pr is marked - # ready, we run everything, including the stuff we'd have skipped up until now. - types: [ opened, synchronize, reopened, ready_for_review ] push: branches: - main @@ -54,7 +49,7 @@ jobs: container: image: returntocorp/semgrep@sha256:ffc6f3567654f9431456d49fd059dfe548f007c494a7eb6cd5a1a3e50d813fb3 steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: Run Semgrep Rules id: semgrep run: semgrep ci --include '*.go' --config 'tools/semgrep/ci' @@ -72,8 +67,8 @@ jobs: - setup runs-on: ${{ fromJSON(needs.setup.outputs.compute-tiny) }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + - uses: actions/setup-go@d0a58c1c4d2b25278816e339b944508c875f3613 with: go-version-file: ./.go-version cache: true @@ -92,7 +87,7 @@ jobs: if: ${{ needs.setup.outputs.enterprise != '' && github.base_ref != '' }} runs-on: ${{ fromJSON(needs.setup.outputs.compute-tiny) }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - id: determine-branch @@ -130,10 +125,7 @@ jobs: !startsWith(github.head_ref, 'backport/docs/') uses: ./.github/workflows/test-go.yml with: - # The regular Go tests use an extra runner to execute the - # binary-dependent tests. 
We isolate them there so that the - # other tests aren't slowed down waiting for a binary build. - total-runners: 17 + total-runners: 16 go-arch: amd64 go-build-tags: '${{ needs.setup.outputs.go-build-tags }},deadlock' runs-on: ${{ needs.setup.outputs.compute-larger }} @@ -146,7 +138,6 @@ jobs: - setup-go-cache # Don't run this job for PR branches starting with 'ui/', 'backport/ui/', 'docs/', or 'backport/docs/' if: | - github.event.pull_request.draft == false && !startsWith(github.head_ref, 'ui/') && !startsWith(github.head_ref, 'backport/ui/') && !startsWith(github.head_ref, 'docs/') && @@ -163,13 +154,11 @@ jobs: go-build-tags: ${{ needs.setup.outputs.go-build-tags }} runs-on: ${{ needs.setup.outputs.compute-huge }} enterprise: ${{ needs.setup.outputs.enterprise }} - name: "-race" secrets: inherit test-go-fips: name: Run Go tests with FIPS configuration # Only run this job for the enterprise repo if the PR branch doesn't start with 'ui/', 'backport/ui/', 'docs/', or 'backport/docs/' if: | - github.event.pull_request.draft == false && needs.setup.outputs.enterprise == 1 && !startsWith(github.head_ref, 'ui/') && !startsWith(github.head_ref, 'backport/ui/') && @@ -189,21 +178,18 @@ jobs: go-build-tags: '${{ needs.setup.outputs.go-build-tags }},deadlock,cgo,fips,fips_140_2' runs-on: ${{ needs.setup.outputs.compute-larger }} enterprise: ${{ needs.setup.outputs.enterprise }} - name: "-fips" secrets: inherit test-ui: name: Test UI # The test-ui job is only run on: # - pushes to main and branches starting with "release/" # - PRs where the branch starts with "ui/", "backport/ui/", "merge", or when base branch starts with "release/" - # - PRs with the "ui" label on github if: | github.ref_name == 'main' || startsWith(github.ref_name, 'release/') || startsWith(github.head_ref, 'ui/') || startsWith(github.head_ref, 'backport/ui/') || - startsWith(github.head_ref, 'merge') || - contains(github.event.pull_request.labels.*.name, 'ui') + startsWith(github.head_ref, 'merge') needs: - setup permissions: @@ -211,26 +197,26 @@ jobs: contents: read runs-on: ${{ fromJSON(needs.setup.outputs.compute-larger) }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + - uses: actions/setup-go@d0a58c1c4d2b25278816e339b944508c875f3613 with: go-version-file: ./.go-version cache: true # Setup node.js without caching to allow running npm install -g yarn (next step) - - uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 + - uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c with: - node-version-file: './ui/package.json' + node-version: 14 - id: install-yarn run: | npm install -g yarn # Setup node.js with caching using the yarn.lock file - - uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 + - uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c with: - node-version-file: './ui/package.json' + node-version: 14 cache: yarn cache-dependency-path: ui/yarn.lock - id: install-browser - uses: browser-actions/setup-chrome@c485fa3bab6be59dce18dbc18ef6ab7cbc8ff5f1 # v1.2.0 + uses: browser-actions/setup-chrome@29abc1a83d1d71557708563b4bc962d0f983a376 - id: ui-dependencies name: ui-dependencies working-directory: ./ui @@ -281,12 +267,12 @@ jobs: cd ui mkdir -p test-results/qunit yarn test:oss - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + - 
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce with: name: test-results-ui path: ui/test-results if: always() - - uses: test-summary/action@62bc5c68de2a6a0d02039763b8c754569df99e3f # TSCCR: no entry for repository "test-summary/action" + - uses: test-summary/action@62bc5c68de2a6a0d02039763b8c754569df99e3f with: paths: "ui/test-results/qunit/results.xml" show: "fail" @@ -300,4 +286,4 @@ jobs: runs-on: ${{ fromJSON(needs.setup.outputs.compute-tiny) }} steps: - run: | - tr -d '\n' <<< '${{ toJSON(needs.*.result) }}' | grep -q -v -E '(failure|cancelled)' + tr -d '\n' <<< '${{ toJSON(needs.*.result) }}' | grep -q -v -E '(failure|cancelled)' diff --git a/.github/workflows/drepecated-functions-checker.yml b/.github/workflows/drepecated-functions-checker.yml deleted file mode 100644 index 853681b34f820f..00000000000000 --- a/.github/workflows/drepecated-functions-checker.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: "Check Deprecations" - -on: - pull_request: - # Runs on PRs to main - branches: - - main - -jobs: - deprecations-check: - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - name: Checkout code - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - with: - fetch-depth: 0 # by default the checkout action doesn't checkout all branches - - name: Setup Go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 - with: - go-version-file: ./.go-version - cache: true - - name: Install required tools - run: | - make bootstrap - - name: Check deprecations for files in diff - run: | - # Need to run this from repository root and not from scripts/ as staticcheck works - # only on packages - ./scripts/deprecations-checker.sh ${{ github.event.pull_request.base.ref }} ${{ github.event.repository.name }} - \ No newline at end of file diff --git a/.github/workflows/enos-fmt.yml b/.github/workflows/enos-fmt.yml index d3d5ade0d63097..298b2dc185f1ed 100644 --- a/.github/workflows/enos-fmt.yml +++ b/.github/workflows/enos-fmt.yml @@ -15,7 +15,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.ELEVATED_GITHUB_TOKEN }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - uses: hashicorp/setup-terraform@v2 with: terraform_wrapper: false diff --git a/.github/workflows/enos-release-testing-oss.yml b/.github/workflows/enos-release-testing-oss.yml index a39bab7aaacfc2..7cddbc56ed32c0 100644 --- a/.github/workflows/enos-release-testing-oss.yml +++ b/.github/workflows/enos-release-testing-oss.yml @@ -15,7 +15,7 @@ jobs: vault-revision: ${{ steps.get-metadata.outputs.vault-revision }} vault-version: ${{ steps.get-metadata.outputs.vault-version }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 with: # Check out the repository at the same Git SHA that was used to create # the artifacts to get the correct metadata. 
@@ -64,7 +64,6 @@ jobs: save-metadata: runs-on: linux - if: always() needs: test steps: - name: Persist metadata diff --git a/.github/workflows/enos-run-k8s.yml b/.github/workflows/enos-run-k8s.yml index 6af4d0393d8a91..e5200d025151bc 100644 --- a/.github/workflows/enos-run-k8s.yml +++ b/.github/workflows/enos-run-k8s.yml @@ -31,7 +31,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.ELEVATED_GITHUB_TOKEN }} steps: - name: Checkout - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + uses: actions/checkout@v3 - name: Set up Terraform uses: hashicorp/setup-terraform@v2 with: @@ -44,7 +44,7 @@ jobs: github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }} - name: Download Docker Image id: download - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + uses: actions/download-artifact@e9ef242655d12993efdcda9058dee2db83a2cb9b with: name: ${{ inputs.artifact-name }} path: ./enos/support/downloads diff --git a/.github/workflows/godoc-test-checker.yml b/.github/workflows/godoc-test-checker.yml index e56ebda42f1a44..c23f46b3fa5fbf 100644 --- a/.github/workflows/godoc-test-checker.yml +++ b/.github/workflows/godoc-test-checker.yml @@ -11,11 +11,11 @@ jobs: godoc-test-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Set Up Go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + uses: actions/setup-go@d0a58c1c4d2b25278816e339b944508c875f3613 with: cache: true go-version-file: ./.go-version diff --git a/.github/workflows/milestone-checker.yml b/.github/workflows/milestone-checker.yml index b23ce6081dd44b..77ff50b8cf1f1b 100644 --- a/.github/workflows/milestone-checker.yml +++ b/.github/workflows/milestone-checker.yml @@ -10,18 +10,16 @@ on: branches: - main - release/** - issues: - types: [milestoned, demilestoned] jobs: # checks that a milestone entry is present for a PR milestone-check: - # If there is a `pr/no-milestone` label we ignore this check - if: "! ( contains(github.event.pull_request.labels.*.name, 'pr/no-milestone') || ( github.event.name == 'labeled' && github.event.label == 'pr/no-milestone' ) )" + # If there a `pr/no-milestone` label we ignore this check + if: "!contains(github.event.pull_request.labels.*.name, 'pr/no-milestone')" runs-on: ubuntu-latest steps: - name: Checkout Actions - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + uses: actions/checkout@v2 with: repository: "grafana/grafana-github-actions" path: ./actions @@ -32,4 +30,4 @@ jobs: uses: ./actions/pr-checks with: token: ${{secrets.GITHUB_TOKEN}} - configPath: configs/milestone-check + configPath: configs/milestone-check \ No newline at end of file diff --git a/.github/workflows/nil-nil-function-checker.yml b/.github/workflows/nil-nil-function-checker.yml deleted file mode 100644 index 04c3e1ba43ddbe..00000000000000 --- a/.github/workflows/nil-nil-function-checker.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Check Functions For nil, nil returns - -on: - pull_request: - types: [opened, synchronize] - # Runs on PRs to main - branches: - - main - -jobs: - # Note: if there is a function we want to ignore this check for, - # You can add 'ignore-nil-nil-function-check' somewhere in the - # godoc for the function. 
- nil-nil-function-check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - with: - fetch-depth: 0 - - name: Set Up Go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 - with: - cache: true - go-version-file: ./.go-version - - name: Verify functions don't return nil, nil - run: make ci-vet-gonilnilfunctions diff --git a/.github/workflows/oss.yml b/.github/workflows/oss.yml index dd6f3392f9f1fb..d49550ff5a05f1 100644 --- a/.github/workflows/oss.yml +++ b/.github/workflows/oss.yml @@ -19,9 +19,9 @@ jobs: runs-on: ubuntu-latest steps: - if: github.event.pull_request != null - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + uses: actions/checkout@v3 - if: github.event.pull_request != null - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1 + uses: dorny/paths-filter@v2 id: changes with: # derived from CODEOWNERS @@ -68,7 +68,7 @@ jobs: - if: github.event.pull_request != null && steps.changes.outputs.ui == 'true' run: echo "PROJECT=171" >> "$GITHUB_ENV" - - uses: actions/add-to-project@v0.3.0 # TSCCR: no entry for repository "actions/add-to-project" + - uses: actions/add-to-project@v0.3.0 with: project-url: https://github.com/orgs/hashicorp/projects/${{ env.PROJECT }} github-token: ${{ secrets.TRIAGE_GITHUB_TOKEN }} diff --git a/.github/workflows/remove-labels.yml b/.github/workflows/remove-labels.yml index 014b6752af7a49..7531e9fdacb977 100644 --- a/.github/workflows/remove-labels.yml +++ b/.github/workflows/remove-labels.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Remove triaging labels from closed issues and PRs - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 # v1.3.0 + uses: actions-ecosystem/action-remove-labels@v1 with: labels: | waiting-for-response \ No newline at end of file diff --git a/.github/workflows/security-scan.yml b/.github/workflows/security-scan.yml index 3d58acbb7a9f50..b00c84cfcfa91a 100644 --- a/.github/workflows/security-scan.yml +++ b/.github/workflows/security-scan.yml @@ -13,25 +13,25 @@ jobs: runs-on: ['linux', 'large'] if: ${{ github.actor != 'dependabot[bot]' || github.actor != 'hc-github-team-secure-vault-core' }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - name: Set up Go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + uses: actions/setup-go@v3 with: go-version: 1.18 - name: Set up Python - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@v4 with: python-version: 3.x - name: Clone Security Scanner repo - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + uses: actions/checkout@v3 with: repository: hashicorp/security-scanner token: ${{ secrets.HASHIBOT_PRODSEC_GITHUB_TOKEN }} path: security-scanner - ref: 5a491479f4131d343afe0a4f18f6fcd36639f3fa + ref: 2526c196a28bb367b1ac6c997ff48e9ebf06834f - name: Install dependencies shell: bash @@ -77,6 +77,6 @@ jobs: cat results.sarif - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@9a866ed4524fc3422c3af1e446dab8efa3503411 # codeql-bundle-20230418 + uses: github/codeql-action/upload-sarif@v2 with: sarif_file: results.sarif diff --git a/.github/workflows/setup-go-cache.yml b/.github/workflows/setup-go-cache.yml index 6d8096c5dc1e93..3b8040a20545c3 100644 --- a/.github/workflows/setup-go-cache.yml +++ 
b/.github/workflows/setup-go-cache.yml @@ -8,10 +8,10 @@ jobs: setup-go-cache: runs-on: ${{ fromJSON(inputs.runs-on) }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c #v3.3.0 as of 2023-01-18 - id: setup-go name: Setup go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + uses: actions/setup-go@d0a58c1c4d2b25278816e339b944508c875f3613 #v3.4.0 as of 2022-12-07 with: go-version-file: ./.go-version cache: true diff --git a/.github/workflows/stable-website.yaml b/.github/workflows/stable-website.yaml index 1447151d73deb0..fdd6da27f9d6ae 100644 --- a/.github/workflows/stable-website.yaml +++ b/.github/workflows/stable-website.yaml @@ -10,7 +10,7 @@ jobs: name: Cherry pick to stable-website branch steps: - name: Checkout - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + uses: actions/checkout@v2 with: ref: stable-website - run: | diff --git a/.github/workflows/test-ci-bootstrap.yml b/.github/workflows/test-ci-bootstrap.yml index a0efa8ddfdd0d1..ec3f2c9345652d 100644 --- a/.github/workflows/test-ci-bootstrap.yml +++ b/.github/workflows/test-ci-bootstrap.yml @@ -24,11 +24,11 @@ jobs: TF_VAR_aws_ssh_public_key: ${{ secrets.SSH_KEY_PUBLIC_CI }} TF_TOKEN_app_terraform_io: ${{ secrets.TF_API_TOKEN }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - name: Set up Terraform uses: hashicorp/setup-terraform@v2 - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }} diff --git a/.github/workflows/test-ci-cleanup.yml b/.github/workflows/test-ci-cleanup.yml index 0e3c90dd3b5fd8..3afdb7229d23f8 100644 --- a/.github/workflows/test-ci-cleanup.yml +++ b/.github/workflows/test-ci-cleanup.yml @@ -11,7 +11,7 @@ jobs: regions: ${{steps.setup.outputs.regions}} steps: - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@v1-node16 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }} @@ -40,7 +40,7 @@ jobs: steps: - name: Configure AWS credentials id: aws-configure - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@v1-node16 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }} @@ -49,7 +49,7 @@ jobs: role-skip-session-tagging: true role-duration-seconds: 3600 mask-aws-account-id: false - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - name: Configure run: | cp enos/ci/aws-nuke.yml . 
@@ -75,7 +75,7 @@ jobs: region: ${{ fromJSON(needs.setup.outputs.regions) }} steps: - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@v1-node16 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }} diff --git a/.github/workflows/test-enos-scenario-ui.yml b/.github/workflows/test-enos-scenario-ui.yml index 53ae5f9c9f37b3..9fa25bd0f3b289 100644 --- a/.github/workflows/test-enos-scenario-ui.yml +++ b/.github/workflows/test-enos-scenario-ui.yml @@ -32,14 +32,18 @@ jobs: name: Get metadata runs-on: ubuntu-latest outputs: + go-version: ${{ steps.get-metadata.outputs.go-version }} + node-version: ${{ steps.get-metadata.outputs.node-version }} runs-on: ${{ steps.get-metadata.outputs.runs-on }} vault_edition: ${{ steps.get-metadata.outputs.vault_edition }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - id: get-metadata env: IS_ENT: ${{ startsWith(github.event.repository.name, 'vault-enterprise' ) }} run: | + echo "go-version=$(cat ./.go-version)" >> "$GITHUB_OUTPUT" + echo "node-version=$(cat ./ui/.nvmrc)" >> "$GITHUB_OUTPUT" if [ "$IS_ENT" == true ]; then echo "detected vault_edition=ent" echo "runs-on=['self-hosted', 'ondemand', 'os=linux', 'type=m5d.4xlarge']" >> "$GITHUB_OUTPUT" @@ -67,20 +71,20 @@ jobs: GOPRIVATE: github.com/hashicorp steps: - name: Checkout - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + uses: actions/checkout@v3 - name: Set Up Go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + uses: actions/setup-go@v3 with: - go-version-file: ./.go-version + go-version: ${{ needs.get-metadata.outputs.go-version }} - uses: hashicorp/action-setup-enos@v1 with: github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }} - name: Set Up Git run: git config --global url."https://${{ secrets.elevated_github_token }}:@github.com".insteadOf "https://github.com" - name: Set Up Node - uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 + uses: actions/setup-node@v3 with: - node-version-file: './ui/package.json' + node-version: ${{ needs.get-metadata.outputs.node-version }} - name: Set Up Terraform uses: hashicorp/setup-terraform@v2 with: @@ -104,12 +108,12 @@ jobs: sudo apt install -y libnss3-dev libgdk-pixbuf2.0-dev libgtk-3-dev libxss-dev libasound2 - name: Install Chrome if: steps.chrome-check.outputs.chrome-version == 'not-installed' - uses: browser-actions/setup-chrome@c485fa3bab6be59dce18dbc18ef6ab7cbc8ff5f1 # v1.2.0 + uses: browser-actions/setup-chrome@v1 - name: Installed Chrome Version run: | echo "Installed Chrome Version = [$(chrome --version 2> /dev/null || google-chrome --version 2> /dev/null || google-chrome-stable --version 2> /dev/null)]" - name: Configure AWS credentials from Test account - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@v1-node16 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }} diff --git a/.github/workflows/test-go.yml b/.github/workflows/test-go.yml index b30608008ca5a0..d0eeeeed81ff98 100644 --- a/.github/workflows/test-go.yml +++ b/.github/workflows/test-go.yml @@ -22,7 +22,6 @@ on: description: A space-separated list of additional build flags. 
required: false type: string - default: '' runs-on: description: An expression indicating which kind of runners to use. required: false @@ -32,11 +31,6 @@ on: description: A comma-separated list of additional build tags to consider satisfied during the build. required: false type: string - name: - description: A suffix to append to archived test results - required: false - default: '' - type: string env: ${{ fromJSON(inputs.env-vars) }} @@ -45,7 +39,7 @@ jobs: runs-on: ${{ fromJSON(inputs.runs-on) }} name: Verify Test Package Distribution steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - id: test working-directory: .github/scripts run: | @@ -85,8 +79,8 @@ jobs: GOPRIVATE: github.com/hashicorp/* TIMEOUT_IN_MINUTES: 60 steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + - uses: actions/setup-go@d0a58c1c4d2b25278816e339b944508c875f3613 with: go-version-file: ./.go-version cache: true @@ -122,21 +116,9 @@ jobs: if: github.repository != 'hashicorp/vault-enterprise' run: | git config --global url."https://${{ secrets.ELEVATED_GITHUB_TOKEN}}@github.com".insteadOf https://github.com - - id: go-mod-download - if: matrix.runner-index > 16 - env: - GOPRIVATE: github.com/hashicorp/* - run: time go mod download -x - - id: build - if: matrix.runner-index > 16 - env: - GOPRIVATE: github.com/hashicorp/* - run: time make ci-bootstrap dev - id: run-go-tests name: Run Go tests timeout-minutes: ${{ fromJSON(env.TIMEOUT_IN_MINUTES) }} - env: - COMMIT_SHA: ${{ github.sha }} run: | set -exo pipefail @@ -176,11 +158,6 @@ jobs: #export HCP_SCADA_ADDRESS=${{ secrets.HCP_SCADA_ADDRESS }} fi - if [ -f bin/vault ]; then - VAULT_BINARY="$(pwd)/bin/vault" - export VAULT_BINARY - fi - # shellcheck disable=SC2086 # can't quote package list GOARCH=${{ inputs.go-arch }} \ go run gotest.tools/gotestsum --format=short-verbose \ @@ -210,13 +187,13 @@ jobs: datadog-ci junit upload --service "$GITHUB_REPOSITORY" test-results/go-test/results.xml if: always() - name: Archive test results - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce with: - name: test-results${{ inputs.name }}-${{ matrix.runner-index }} + name: test-results-${{ matrix.runner-index }} path: test-results/ if: always() - name: Create a summary of tests - uses: test-summary/action@62bc5c68de2a6a0d02039763b8c754569df99e3f # TSCCR: no entry for repository "test-summary/action" + uses: test-summary/action@62bc5c68de2a6a0d02039763b8c754569df99e3f with: paths: "test-results/go-test/results.xml" show: "fail" diff --git a/.github/workflows/test-link-rewrites.yml b/.github/workflows/test-link-rewrites.yml new file mode 100644 index 00000000000000..1d9c0ba60bb61f --- /dev/null +++ b/.github/workflows/test-link-rewrites.yml @@ -0,0 +1,16 @@ +name: Test Link Rewrites + +on: [deployment_status] + +jobs: + test-link-rewrites: + if: github.event.deployment_status.state == 'success' + uses: hashicorp/dev-portal/.github/workflows/docs-content-link-rewrites-e2e.yml@2aceb60125f6c15f4c8dbe2e4d79148047bfa437 + with: + repo-owner: "hashicorp" + repo-name: "vault" + commit-sha: ${{ github.sha }} + main-branch-preview-url: "https://vault-git-main-hashicorp.vercel.app/" + # Workflow is only intended to run 
for one single migration PR + # This variable does not need to be updated + pr-branch-preview-url: "https://vault-git-docs-ambmigrate-link-formats-hashicorp.vercel.app/" diff --git a/.github/workflows/test-run-acc-tests-for-path.yml b/.github/workflows/test-run-acc-tests-for-path.yml index 10d539e7d85c28..f82b32c567a155 100644 --- a/.github/workflows/test-run-acc-tests-for-path.yml +++ b/.github/workflows/test-run-acc-tests-for-path.yml @@ -20,13 +20,13 @@ jobs: go-test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: Set Up Go - uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 + uses: actions/setup-go@d0a58c1c4d2b25278816e339b944508c875f3613 with: go-version-file: ./.go-version - run: go test -v ./${{ inputs.path }}/... 2>&1 | tee ${{ inputs.name }}.txt - - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + - uses: actions/upload-artifact@b7f8abb1508181956e8e162db84b466c27e18ce with: name: ${{ inputs.name }}-output path: ${{ inputs.name }}.txt diff --git a/.github/workflows/test-run-enos-scenario-matrix.yml b/.github/workflows/test-run-enos-scenario-matrix.yml index 7a1abd7bc3038e..b40b6b50b3f25c 100644 --- a/.github/workflows/test-run-enos-scenario-matrix.yml +++ b/.github/workflows/test-run-enos-scenario-matrix.yml @@ -72,7 +72,7 @@ jobs: MATRIX_FILE: ./.github/enos-run-matrices/${{ inputs.matrix-file-name }}.json MATRIX_TEST_GROUP: ${{ inputs.matrix-test-group }} steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 with: ref: ${{ inputs.vault-revision }} - id: metadata @@ -106,13 +106,13 @@ jobs: ENOS_VAR_vault_license_path: ./support/vault.hclic ENOS_DEBUG_DATA_ROOT_DIR: /tmp/enos-debug-data steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - uses: actions/checkout@v3 - uses: hashicorp/setup-terraform@v2 with: # the Terraform wrapper will break Terraform execution in Enos because # it changes the output to text when we expect it to be JSON. terraform_wrapper: false - - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + - uses: aws-actions/configure-aws-credentials@v1-node16 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }} @@ -131,7 +131,7 @@ jobs: chmod 600 "./enos/support/private_key.pem" echo "debug_data_artifact_name=enos-debug-data_$(echo "${{ matrix.scenario }}" | sed -e 's/ /_/g' | sed -e 's/:/=/g')" >> "$GITHUB_OUTPUT" - if: contains(inputs.matrix-file-name, 'github') - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + uses: actions/download-artifact@v3 with: name: ${{ inputs.build-artifact-name }} path: ./enos/support/downloads @@ -150,7 +150,7 @@ jobs: run: enos scenario run --timeout 60m0s --chdir ./enos ${{ matrix.scenario }} - name: Upload Debug Data if: failure() - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + uses: actions/upload-artifact@v3 with: # The name of the artifact is the same as the matrix scenario name with the spaces replaced with underscores and colons replaced by equals. 
name: ${{ steps.prepare_scenario.outputs.debug_data_artifact_name }} diff --git a/.gitignore b/.gitignore index 81e0bbeb74468f..f81de12cfe249c 100644 --- a/.gitignore +++ b/.gitignore @@ -133,5 +133,3 @@ website/components/node_modules *.log tools/godoctests/.bin -tools/gonilnilfunctions/.bin - diff --git a/.release/ci.hcl b/.release/ci.hcl index 335a21200fbfaf..0be4e8ba9b7168 100644 --- a/.release/ci.hcl +++ b/.release/ci.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - schema = "1" project "vault" { diff --git a/.release/docker/docker-entrypoint.sh b/.release/docker/docker-entrypoint.sh index 2b9b8f35a1606f..3b72da25b7f412 100755 --- a/.release/docker/docker-entrypoint.sh +++ b/.release/docker/docker-entrypoint.sh @@ -1,7 +1,4 @@ #!/usr/bin/dumb-init /bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e # Note above that we run dumb-init as PID 1 in order to reap zombie processes diff --git a/.release/docker/ubi-docker-entrypoint.sh b/.release/docker/ubi-docker-entrypoint.sh index 794e69c614867a..6f818bcd439fb1 100755 --- a/.release/docker/ubi-docker-entrypoint.sh +++ b/.release/docker/ubi-docker-entrypoint.sh @@ -1,7 +1,4 @@ #!/bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e # Prevent core dumps diff --git a/.release/linux/package/etc/vault.d/vault.hcl b/.release/linux/package/etc/vault.d/vault.hcl index 4a59d367250737..33c2e5f3225ed4 100644 --- a/.release/linux/package/etc/vault.d/vault.hcl +++ b/.release/linux/package/etc/vault.d/vault.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Full configuration options can be found at https://www.vaultproject.io/docs/configuration ui = true diff --git a/.release/release-metadata.hcl b/.release/release-metadata.hcl index 3a49b69c59b14a..19aadfc71ae1ad 100644 --- a/.release/release-metadata.hcl +++ b/.release/release-metadata.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - url_docker_registry_dockerhub = "https://hub.docker.com/r/hashicorp/vault" url_docker_registry_ecr = "https://gallery.ecr.aws/hashicorp/vault" url_license = "https://github.com/hashicorp/vault/blob/main/LICENSE" diff --git a/.release/security-scan.hcl b/.release/security-scan.hcl index 62460e431db968..6d394feaacc18a 100644 --- a/.release/security-scan.hcl +++ b/.release/security-scan.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - container { dependencies = true alpine_secdb = true diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ad1236952d6c7..6c2611347420c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,270 +1,9 @@ -## 1.14.0-rc1 -### June 08, 2023 - -CHANGES: - -* auth/alicloud: Updated plugin from v0.14.0 to v0.15.0 [[GH-20758](https://github.com/hashicorp/vault/pull/20758)] -* auth/azure: Updated plugin from v0.13.0 to v0.15.0 [[GH-20816](https://github.com/hashicorp/vault/pull/20816)] -* auth/centrify: Updated plugin from v0.14.0 to v0.15.1 [[GH-20745](https://github.com/hashicorp/vault/pull/20745)] -* auth/gcp: Updated plugin from v0.15.0 to v0.16.0 [[GH-20725](https://github.com/hashicorp/vault/pull/20725)] -* auth/jwt: Updated plugin from v0.15.0 to v0.16.0 [[GH-20799](https://github.com/hashicorp/vault/pull/20799)] -* auth/kubernetes: Update plugin to v0.16.0 [[GH-20802](https://github.com/hashicorp/vault/pull/20802)] -* core: Bump Go version to 1.20.4. -* core: Remove feature toggle for SSCTs, i.e. 
the env var VAULT_DISABLE_SERVER_SIDE_CONSISTENT_TOKENS. [[GH-20834](https://github.com/hashicorp/vault/pull/20834)] -* core: Revert #19676 (VAULT_GRPC_MIN_CONNECT_TIMEOUT env var) as we decided it was unnecessary. [[GH-20826](https://github.com/hashicorp/vault/pull/20826)] -* database/couchbase: Updated plugin from v0.9.0 to v0.9.2 [[GH-20764](https://github.com/hashicorp/vault/pull/20764)] -* database/redis-elasticache: Updated plugin from v0.2.0 to v0.2.1 [[GH-20751](https://github.com/hashicorp/vault/pull/20751)] -* replication (enterprise): Add a new parameter for the update-primary API call -that allows for setting of the primary cluster addresses directly, instead of -via a token. -* secrets/ad: Updated plugin from v0.10.1-0.20230329210417-0b2cdb26cf5d to v0.16.0 [[GH-20750](https://github.com/hashicorp/vault/pull/20750)] -* secrets/alicloud: Updated plugin from v0.5.4-beta1.0.20230330124709-3fcfc5914a22 to v0.15.0 [[GH-20787](https://github.com/hashicorp/vault/pull/20787)] -* secrets/aure: Updated plugin from v0.15.0 to v0.16.0 [[GH-20777](https://github.com/hashicorp/vault/pull/20777)] -* secrets/database/mongodbatlas: Updated plugin from v0.9.0 to v0.10.0 [[GH-20882](https://github.com/hashicorp/vault/pull/20882)] -* secrets/database/snowflake: Updated plugin from v0.7.0 to v0.8.0 [[GH-20807](https://github.com/hashicorp/vault/pull/20807)] -* secrets/gcp: Updated plugin from v0.15.0 to v0.16.0 [[GH-20818](https://github.com/hashicorp/vault/pull/20818)] -* secrets/keymgmt: Updated plugin to v0.9.1 -* secrets/kubernetes: Update plugin to v0.5.0 [[GH-20802](https://github.com/hashicorp/vault/pull/20802)] -* secrets/mongodbatlas: Updated plugin from v0.9.1 to v0.10.0 [[GH-20742](https://github.com/hashicorp/vault/pull/20742)] -* secrets/pki: Warning when issuing leafs from CSRs with basic constraints. In the future, issuance of non-CA leaf certs from CSRs with asserted IsCA Basic Constraints will be prohibited. [[GH-20654](https://github.com/hashicorp/vault/pull/20654)] - -FEATURES: - -* **AWS Static Roles**: The AWS Secrets Engine can manage static roles configured by users. [[GH-20536](https://github.com/hashicorp/vault/pull/20536)] -* **Automated License Utilization Reporting**: Added automated license -utilization reporting, which sends minimal product-license [metering -data](https://developer.hashicorp.com/vault/docs/enterprise/license/utilization-reporting) -to HashiCorp without requiring you to manually collect and report them. -* **MongoDB Atlas Database Secrets**: Adds support for generating X.509 certificates on dynamic roles for user authentication [[GH-20882](https://github.com/hashicorp/vault/pull/20882)] -* **NEW PKI Workflow in UI**: Completes generally available rollout of new PKI UI that provides smoother mount configuration and a more guided user experience [[GH-pki-ui-improvements](https://github.com/hashicorp/vault/pull/pki-ui-improvements)] -* **Vault PKI ACME Server**: Support for the ACME certificate lifecycle management protocol has been added to the Vault PKI Plugin. This allows standard ACME clients, such as the EFF's certbot and the CNCF's k8s cert-manager, to request certificates from a Vault server with no knowledge of Vault APIs or authentication mechanisms. For public-facing Vault instances, we recommend requiring External Account Bindings (EAB) to limit the ability to request certificates to only authenticated clients. 
[[GH-20752](https://github.com/hashicorp/vault/pull/20752)] -* **Vault Proxy**: Introduced Vault Proxy, a new subcommand of the Vault binary that can be invoked using `vault proxy -config=config.hcl`. It currently has the same feature set as Vault Agent's API proxy, but the two may diverge in the future. We plan to deprecate the API proxy functionality of Vault Agent in a future release. [[GH-20548](https://github.com/hashicorp/vault/pull/20548)] -* cli: Add 'agent generate-config' sub-command [[GH-20530](https://github.com/hashicorp/vault/pull/20530)] -* **Sidebar Navigation in UI**: A new sidebar navigation panel has been added in the UI to replace the top navigation bar. - -IMPROVEMENTS: - -* activitylog: EntityRecord protobufs now contain a ClientType field for -distinguishing client sources. [[GH-20626](https://github.com/hashicorp/vault/pull/20626)] -* agent: Add integration tests for agent running in process supervisor mode [[GH-20741](https://github.com/hashicorp/vault/pull/20741)] -* agent: Add logic to validate env_template entries in configuration [[GH-20569](https://github.com/hashicorp/vault/pull/20569)] -* agent: initial implementation of a process runner for injecting secrets via environment variables via vault agent [[GH-20628](https://github.com/hashicorp/vault/pull/20628)] -* api: GET ... /sys/internal/counters/activity?current_billing_period=true now -results in a response which contains the full billing period [[GH-20694](https://github.com/hashicorp/vault/pull/20694)] -* audit: forwarded requests can now contain host metadata on the node it was sent 'from' or a flag to indicate that it was forwarded. -* auth/kerberos: Enable plugin multiplexing -auth/kerberos: Upgrade plugin dependencies [[GH-20771](https://github.com/hashicorp/vault/pull/20771)] -* command/server (enterprise): -dev-three-node now creates perf standbys instead of regular standbys. [[GH-20629](https://github.com/hashicorp/vault/pull/20629)] -* command/server: Add support for dumping pprof files to the filesystem via SIGUSR2 when -`VAULT_PPROF_WRITE_TO_FILE=true` is set on the server. [[GH-20609](https://github.com/hashicorp/vault/pull/20609)] -* core (enterprise): license updates trigger a reload of reporting and the activity log [[GH-20680](https://github.com/hashicorp/vault/pull/20680)] -* core (enterprise): support reloading configuration for automated reporting via SIGHUP [[GH-20680](https://github.com/hashicorp/vault/pull/20680)] -* core, secrets/pki, audit: Update dependency go-jose to v3 due to v2 deprecation. [[GH-20559](https://github.com/hashicorp/vault/pull/20559)] -* core: Add possibility to decode a generated encoded root token via the rest API [[GH-20595](https://github.com/hashicorp/vault/pull/20595)] -* core: include namespace path in granting_policies block of audit log -* core: include reason for ErrReadOnly on PBPWF writing failures -* core: report intermediate error messages during request forwarding [[GH-20643](https://github.com/hashicorp/vault/pull/20643)] -* database/elasticsearch: Upgrade plugin dependencies [[GH-20767](https://github.com/hashicorp/vault/pull/20767)] -* database/redis: Upgrade plugin dependencies [[GH-20763](https://github.com/hashicorp/vault/pull/20763)] -* sdk/framework: Fix non-deterministic ordering of 'required' fields in OpenAPI spec [[GH-20881](https://github.com/hashicorp/vault/pull/20881)] -* secrets/consul: Improve error message when ACL bootstrapping fails. 
[[GH-20891](https://github.com/hashicorp/vault/pull/20891)] -* secrets/gcpkms: Enable plugin multiplexing -secrets/gcpkms: Upgrade plugin dependencies [[GH-20784](https://github.com/hashicorp/vault/pull/20784)] -* secrets/pki: add subject key identifier to read key response [[GH-20642](https://github.com/hashicorp/vault/pull/20642)] -* secrets/transit: Respond to writes with updated key policy, cache configuration. [[GH-20652](https://github.com/hashicorp/vault/pull/20652)] -* secrets/transit: Support BYOK-encrypted export of keys to securely allow synchronizing specific keys and version across clusters. [[GH-20736](https://github.com/hashicorp/vault/pull/20736)] -* ui: Add filtering by auth type and auth name to the Authentication Method list view. [[GH-20747](https://github.com/hashicorp/vault/pull/20747)] -* ui: Update Web CLI with examples and a new `kv-get` command for reading kv v2 data and metadata [[GH-20590](https://github.com/hashicorp/vault/pull/20590)] - -BUG FIXES: - -* agent: Fix bug with 'cache' stanza validation [[GH-20934](https://github.com/hashicorp/vault/pull/20934)] -* api: Properly Handle nil identity_policies in Secret Data [[GH-20636](https://github.com/hashicorp/vault/pull/20636)] -* auth/ldap: Set default value for `max_page_size` properly [[GH-20453](https://github.com/hashicorp/vault/pull/20453)] -* core (enterprise): Fix intermittent issue with token entries sometimes not being found when using a newly created token in a request to a secondary, even when SSCT `new_token` forwarding is set. When this occurred, this would result in the following error to the client: `error performing token check: no lease entry found for token that ought to have one, possible eventual consistency issue`. -* core (enterprise): Fix log shipper buffer size overflow issue for 32 bit architecture. -* core (enterprise): Fix logshipper buffer size to default to DefaultBufferSize only when reported system memory is zero. -* core (enterprise): Fix panic when using invalid accessor for control-group request -* core (enterprise): Fix perf standby WAL streaming silently failures when replication setup happens at a bad time. -* core (enterprise): Fix read on perf standbys failing with 412 after leadership change, unseal, restores or restarts when no writes occur -* core (enterprise): Remove MFA Enforcment configuration for namespace when deleting namespace -* core/ssct (enterprise): Fixed race condition where a newly promoted DR may revert `sscGenCounter` -resulting in 412 errors. -* core: Fix Forwarded Writer construction to correctly find active nodes, allowing PKI cross-cluster functionality to succeed on existing mounts. -* core: Fix writes to readonly storage on performance standbys when user lockout feature is enabled. [[GH-20783](https://github.com/hashicorp/vault/pull/20783)] -* license (enterprise): Fix bug where license would update even if the license didn't change. -* replication (enterprise): Fix a caching issue when replicating filtered data to -a performance secondary. This resulted in the data being set to nil in the cache -and a "invalid value" error being returned from the API. -* replication (enterprise): Fix a race condition with invalid tokens during WAL streaming that was causing Secondary clusters to be unable to connect to a Primary. -* replication (enterprise): Fix bug where reloading external plugin on a secondary would -break replication. 
-* replication (enterprise): Fix replication status for Primary clusters showing its primary cluster's information (in case of DR) in secondaries field when known_secondaries field is nil -* replication (enterprise): fix bug where secondary grpc connections would timeout when connecting to a primary host that no longer exists. -* secrets/pki: Support setting both maintain_stored_certificate_counts=false and publish_stored_certificate_count_metrics=false explicitly in tidy config. [[GH-20664](https://github.com/hashicorp/vault/pull/20664)] -* secrets/transform (enterprise): Address SQL connection leak when cleaning expired tokens -* secrets/transform (enterprise): Fix a caching bug affecting secondary nodes after a tokenization key rotation -* secrets/transform (enterprise): Fix persistence problem with rotated tokenization key versions -* secrets/transform: Added importing of keys and key versions into the Transform secrets engine using the command 'vault transform import' and 'vault transform import-version'. [[GH-20668](https://github.com/hashicorp/vault/pull/20668)] -* secrets/transit: Fix export of HMAC-only key, correctly exporting the key used for sign operations. For consumers of the previously incorrect key, use the plaintext export to retrieve these incorrect keys and import them as new versions. -secrets/transit: Fix bug related to shorter dedicated HMAC key sizing. -sdk/helper/keysutil: New HMAC type policies will have HMACKey equal to Key and be copied over on import. [[GH-20864](https://github.com/hashicorp/vault/pull/20864)] -* ui: Fixes issue unsealing cluster for seal types other than shamir [[GH-20897](https://github.com/hashicorp/vault/pull/20897)] -* ui: fixes auto_rotate_period ttl input for transit keys [[GH-20731](https://github.com/hashicorp/vault/pull/20731)] -* ui: fixes key_bits and signature_bits reverting to default values when editing a pki role [[GH-20907](https://github.com/hashicorp/vault/pull/20907)] - -## 1.13.3 -### June 08, 2023 - -CHANGES: - -* core: Bump Go version to 1.20.4. -* core: Revert #19676 (VAULT_GRPC_MIN_CONNECT_TIMEOUT env var) as we decided it was unnecessary. [[GH-20826](https://github.com/hashicorp/vault/pull/20826)] -* replication (enterprise): Add a new parameter for the update-primary API call -that allows for setting of the primary cluster addresses directly, instead of -via a token. -* storage/aerospike: Aerospike storage shouldn't be used on 32-bit architectures and is now unsupported on them. [[GH-20825](https://github.com/hashicorp/vault/pull/20825)] - -IMPROVEMENTS: - -* Add debug symbols back to builds to fix Dynatrace support [[GH-20519](https://github.com/hashicorp/vault/pull/20519)] -* audit: add a `mount_point` field to audit requests and response entries [[GH-20411](https://github.com/hashicorp/vault/pull/20411)] -* autopilot: Update version to v0.2.0 to add better support for respecting min quorum [[GH-19472](https://github.com/hashicorp/vault/pull/19472)] -* command/server: Add support for dumping pprof files to the filesystem via SIGUSR2 when -`VAULT_PPROF_WRITE_TO_FILE=true` is set on the server. 
[[GH-20609](https://github.com/hashicorp/vault/pull/20609)] -* core: Add possibility to decode a generated encoded root token via the rest API [[GH-20595](https://github.com/hashicorp/vault/pull/20595)] -* core: include namespace path in granting_policies block of audit log -* core: report intermediate error messages during request forwarding [[GH-20643](https://github.com/hashicorp/vault/pull/20643)] -* openapi: Fix generated types for duration strings [[GH-20841](https://github.com/hashicorp/vault/pull/20841)] -* sdk/framework: Fix non-deterministic ordering of 'required' fields in OpenAPI spec [[GH-20881](https://github.com/hashicorp/vault/pull/20881)] -* secrets/pki: add subject key identifier to read key response [[GH-20642](https://github.com/hashicorp/vault/pull/20642)] - -BUG FIXES: - -* api: Properly Handle nil identity_policies in Secret Data [[GH-20636](https://github.com/hashicorp/vault/pull/20636)] -* auth/ldap: Set default value for `max_page_size` properly [[GH-20453](https://github.com/hashicorp/vault/pull/20453)] -* cli: CLI should take days as a unit of time for ttl like flags [[GH-20477](https://github.com/hashicorp/vault/pull/20477)] -* cli: disable printing flags warnings messages for the ssh command [[GH-20502](https://github.com/hashicorp/vault/pull/20502)] -* command/server: fixes panic in Vault server command when running in recovery mode [[GH-20418](https://github.com/hashicorp/vault/pull/20418)] -* core (enterprise): Fix log shipper buffer size overflow issue for 32 bit architecture. -* core (enterprise): Fix logshipper buffer size to default to DefaultBufferSize only when reported system memory is zero. -* core (enterprise): Remove MFA Enforcment configuration for namespace when deleting namespace -* core/identity: Allow updates of only the custom-metadata for entity alias. [[GH-20368](https://github.com/hashicorp/vault/pull/20368)] -* core: Fix Forwarded Writer construction to correctly find active nodes, allowing PKI cross-cluster functionality to succeed on existing mounts. -* core: Fix writes to readonly storage on performance standbys when user lockout feature is enabled. [[GH-20783](https://github.com/hashicorp/vault/pull/20783)] -* core: prevent panic on login after namespace is deleted that had mfa enforcement [[GH-20375](https://github.com/hashicorp/vault/pull/20375)] -* replication (enterprise): Fix a race condition with invalid tokens during WAL streaming that was causing Secondary clusters to be unable to connect to a Primary. -* replication (enterprise): fix bug where secondary grpc connections would timeout when connecting to a primary host that no longer exists. -* secrets/pki: Include per-issuer enable_aia_url_templating in issuer read endpoint. [[GH-20354](https://github.com/hashicorp/vault/pull/20354)] -* secrets/transform (enterprise): Fix a caching bug affecting secondary nodes after a tokenization key rotation -* secrets/transform: Added importing of keys and key versions into the Transform secrets engine using the command 'vault transform import' and 'vault transform import-version'. [[GH-20668](https://github.com/hashicorp/vault/pull/20668)] -* secrets/transit: Fix export of HMAC-only key, correctly exporting the key used for sign operations. For consumers of the previously incorrect key, use the plaintext export to retrieve these incorrect keys and import them as new versions. -secrets/transit: Fix bug related to shorter dedicated HMAC key sizing. 
-sdk/helper/keysutil: New HMAC type policies will have HMACKey equal to Key and be copied over on import. [[GH-20864](https://github.com/hashicorp/vault/pull/20864)] -* ui: Fixes issue unsealing cluster for seal types other than shamir [[GH-20897](https://github.com/hashicorp/vault/pull/20897)] -* ui: fixes issue creating mfa login enforcement from method enforcements tab [[GH-20603](https://github.com/hashicorp/vault/pull/20603)] -* ui: fixes key_bits and signature_bits reverting to default values when editing a pki role [[GH-20907](https://github.com/hashicorp/vault/pull/20907)] - -## 1.13.2 -### April 26, 2023 - -CHANGES: - -* core: Bump Go version to 1.20.3. - -SECURITY: - -* core/seal: Fix handling of HMACing of seal-wrapped storage entries from HSMs using CKM_AES_CBC or CKM_AES_CBC_PAD which may have allowed an attacker to conduct a padding oracle attack. This vulnerability, CVE-2023-2197, affects Vault from 1.13.0 up to 1.13.1 and was fixed in 1.13.2. [[HCSEC-2023-14](https://discuss.hashicorp.com/t/hcsec-2023-14-vault-enterprise-vulnerable-to-padding-oracle-attacks-when-using-a-cbc-based-encryption-mechanism-with-a-hsm/53322)] - -IMPROVEMENTS: - -* Add debug symbols back to builds to fix Dynatrace support [[GH-20294](https://github.com/hashicorp/vault/pull/20294)] -* cli/namespace: Add detailed flag to output additional namespace information -such as namespace IDs and custom metadata. [[GH-20243](https://github.com/hashicorp/vault/pull/20243)] -* core/activity: add an endpoint to write test activity log data, guarded by a build flag [[GH-20019](https://github.com/hashicorp/vault/pull/20019)] -* core: Add a `raft` sub-field to the `storage` and `ha_storage` details provided by the -`/sys/config/state/sanitized` endpoint in order to include the `max_entry_size`. [[GH-20044](https://github.com/hashicorp/vault/pull/20044)] -* core: include reason for ErrReadOnly on PBPWF writing failures -* sdk/ldaputil: added `connection_timeout` to tune connection timeout duration -for all LDAP plugins. [[GH-20144](https://github.com/hashicorp/vault/pull/20144)] -* secrets/pki: Decrease size and improve compatibility of OCSP responses by removing issuer certificate. [[GH-20201](https://github.com/hashicorp/vault/pull/20201)] -* sys/wrapping: Add example how to unwrap without authentication in Vault [[GH-20109](https://github.com/hashicorp/vault/pull/20109)] -* ui: Allows license-banners to be dismissed. Saves preferences in localStorage. [[GH-19116](https://github.com/hashicorp/vault/pull/19116)] - -BUG FIXES: - -* auth/ldap: Add max_page_size configurable to LDAP configuration [[GH-19032](https://github.com/hashicorp/vault/pull/19032)] -* command/server: Fix incorrect paths in generated config for `-dev-tls` flag on Windows [[GH-20257](https://github.com/hashicorp/vault/pull/20257)] -* core (enterprise): Fix intermittent issue with token entries sometimes not being found when using a newly created token in a request to a secondary, even when SSCT `new_token` forwarding is set. When this occurred, this would result in the following error to the client: `error performing token check: no lease entry found for token that ought to have one, possible eventual consistency issue`. -* core (enterprise): Fix read on perf standbys failing with 412 after leadership change, unseal, restores or restarts when no writes occur -* core/ssct (enterprise): Fixed race condition where a newly promoted DR may revert `sscGenCounter` -resulting in 412 errors. 
-* core: Fix regression breaking non-raft clusters whose nodes share the same cluster_addr/api_addr. [[GH-19721](https://github.com/hashicorp/vault/pull/19721)] -* helper/random: Fix race condition in string generator helper [[GH-19875](https://github.com/hashicorp/vault/pull/19875)] -* kmip (enterprise): Fix a problem decrypting with keys that have no Process Start Date attribute. -* pki: Fix automatically turning off CRL signing on upgrade to Vault >= 1.12, if CA Key Usage disallows it [[GH-20220](https://github.com/hashicorp/vault/pull/20220)] -* replication (enterprise): Fix a caching issue when replicating filtered data to -a performance secondary. This resulted in the data being set to nil in the cache -and a "invalid value" error being returned from the API. -* replication (enterprise): Fix replication status for Primary clusters showing its primary cluster's information (in case of DR) in secondaries field when known_secondaries field is nil -* sdk/helper/ocsp: Workaround bug in Go's ocsp.ParseResponse(...), causing validation to fail with embedded CA certificates. -auth/cert: Fix OCSP validation against Vault's PKI engine. [[GH-20181](https://github.com/hashicorp/vault/pull/20181)] -* secrets/aws: Revert changes that removed the lease on STS credentials, while leaving the new ttl field in place. [[GH-20034](https://github.com/hashicorp/vault/pull/20034)] -* secrets/pki: Ensure cross-cluster delta WAL write failure only logs to avoid unattended forwarding. [[GH-20057](https://github.com/hashicorp/vault/pull/20057)] -* secrets/pki: Fix building of unified delta CRLs and recovery during unified delta WAL write failures. [[GH-20058](https://github.com/hashicorp/vault/pull/20058)] -* secrets/pki: Fix patching of leaf_not_after_behavior on issuers. [[GH-20341](https://github.com/hashicorp/vault/pull/20341)] -* secrets/transform (enterprise): Address SQL connection leak when cleaning expired tokens -* ui: Fix OIDC provider logo showing when domain doesn't match [[GH-20263](https://github.com/hashicorp/vault/pull/20263)] -* ui: Fix bad link to namespace when namespace name includes `.` [[GH-19799](https://github.com/hashicorp/vault/pull/19799)] -* ui: fixes browser console formatting for help command output [[GH-20064](https://github.com/hashicorp/vault/pull/20064)] -* ui: fixes remaining doc links to include /vault in path [[GH-20070](https://github.com/hashicorp/vault/pull/20070)] -* ui: remove use of htmlSafe except when first sanitized [[GH-20235](https://github.com/hashicorp/vault/pull/20235)] -* website/docs: Fix Kubernetes Auth Code Example to use the correct whitespace in import. [[GH-20216](https://github.com/hashicorp/vault/pull/20216)] - -## 1.13.1 -### March 29, 2023 - -SECURITY: - -* storage/mssql: When using Vault’s community-supported Microsoft SQL (MSSQL) database storage backend, a privileged attacker with the ability to write arbitrary data to Vault’s configuration may be able to perform arbitrary SQL commands on the underlying database server through Vault. This vulnerability, CVE-2023-0620, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-12](https://discuss.hashicorp.com/t/hcsec-2023-12-vault-s-microsoft-sql-database-storage-backend-vulnerable-to-sql-injection-via-configuration-file/52080)] -* secrets/pki: Vault’s PKI mount issuer endpoints did not correctly authorize access to remove an issuer or modify issuer metadata, potentially resulting in denial of service of the PKI mount. 
This bug did not affect public or private key material, trust chains or certificate issuance. This vulnerability, CVE-2023-0665, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-11](https://discuss.hashicorp.com/t/hcsec-2023-11-vault-s-pki-issuer-endpoint-did-not-correctly-authorize-access-to-issuer-metadata/52079)] -* core: HashiCorp Vault’s implementation of Shamir’s secret sharing used precomputed table lookups, and was vulnerable to cache-timing attacks. An attacker with access to, and the ability to observe a large number of unseal operations on the host through a side channel may reduce the search space of a brute force effort to recover the Shamir shares. This vulnerability, CVE-2023-25000, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-10](https://discuss.hashicorp.com/t/hcsec-2023-10-vault-vulnerable-to-cache-timing-attacks-during-seal-and-unseal-operations/52078)] - -IMPROVEMENTS: - -* auth/github: Allow for an optional Github auth token environment variable to make authenticated requests when fetching org id -website/docs: Add docs for `VAULT_AUTH_CONFIG_GITHUB_TOKEN` environment variable when writing Github config [[GH-19244](https://github.com/hashicorp/vault/pull/19244)] -* core: Allow overriding gRPC connect timeout via VAULT_GRPC_MIN_CONNECT_TIMEOUT. This is an env var rather than a config setting because we don't expect this to ever be needed. It's being added as a last-ditch -option in case all else fails for some replication issues we may not have fully reproduced. [[GH-19676](https://github.com/hashicorp/vault/pull/19676)] -* core: validate name identifiers in mssql physical storage backend prior use [[GH-19591](https://github.com/hashicorp/vault/pull/19591)] -* database/elasticsearch: Update error messages resulting from Elasticsearch API errors [[GH-19545](https://github.com/hashicorp/vault/pull/19545)] -* events: Suppress log warnings triggered when events are sent but the events system is not enabled. [[GH-19593](https://github.com/hashicorp/vault/pull/19593)] - -BUG FIXES: - -* agent: Fix panic when SIGHUP is issued to Agent while it has a non-TLS listener. [[GH-19483](https://github.com/hashicorp/vault/pull/19483)] -* core (enterprise): Attempt to reconnect to a PKCS#11 HSM if we retrieve a CKR_FUNCTION_FAILED error. -* core: Fixed issue with remounting mounts that have a non-trailing space in the 'to' or 'from' paths. [[GH-19585](https://github.com/hashicorp/vault/pull/19585)] -* kmip (enterprise): Do not require attribute Cryptographic Usage Mask when registering Secret Data managed objects. -* kmip (enterprise): Fix a problem forwarding some requests to the active node. -* openapi: Fix logic for labeling unauthenticated/sudo paths. [[GH-19600](https://github.com/hashicorp/vault/pull/19600)] -* secrets/ldap: Invalidates WAL entry for static role if `password_policy` has changed. 
[[GH-19640](https://github.com/hashicorp/vault/pull/19640)] -* secrets/pki: Fix PKI revocation request forwarding from standby nodes due to an error wrapping bug [[GH-19624](https://github.com/hashicorp/vault/pull/19624)] -* secrets/transform (enterprise): Fix persistence problem with rotated tokenization key versions -* ui: Fixes crypto.randomUUID error in unsecure contexts from third party ember-data library [[GH-19428](https://github.com/hashicorp/vault/pull/19428)] -* ui: fixes SSH engine config deletion [[GH-19448](https://github.com/hashicorp/vault/pull/19448)] -* ui: fixes issue navigating back a level using the breadcrumb from secret metadata view [[GH-19703](https://github.com/hashicorp/vault/pull/19703)] -* ui: fixes oidc tabs in auth form submitting with the root's default_role value after a namespace has been inputted [[GH-19541](https://github.com/hashicorp/vault/pull/19541)] -* ui: pass encodeBase64 param to HMAC transit-key-actions. [[GH-19429](https://github.com/hashicorp/vault/pull/19429)] -* ui: use URLSearchParams interface to capture namespace param from SSOs (ex. ADFS) with decoded state param in callback url [[GH-19460](https://github.com/hashicorp/vault/pull/19460)] - ## 1.13.0 -### March 01, 2023 +### Unreleased SECURITY: * secrets/ssh: removal of the deprecated dynamic keys mode. **When any remaining dynamic key leases expire**, an error stating `secret is unsupported by this backend` will be thrown by the lease manager. [[GH-18874](https://github.com/hashicorp/vault/pull/18874)] -* auth/approle: When using the Vault and Vault Enterprise (Vault) approle auth method, any authenticated user with access to the /auth/approle/role/:role_name/secret-id-accessor/destroy endpoint can destroy the secret ID of any other role by providing the secret ID accessor. This vulnerability, CVE-2023-24999 has been fixed in Vault 1.13.0, 1.12.4, 1.11.8, 1.10.11 and above. [[HSEC-2023-07](https://discuss.hashicorp.com/t/hcsec-2023-07-vault-fails-to-verify-if-approle-secretid-belongs-to-role-during-a-destroy-operation/51305)] CHANGES: @@ -272,7 +11,7 @@ CHANGES: * auth/approle: Add maximum length of 4096 for approle role_names, as this value results in HMAC calculation [[GH-17768](https://github.com/hashicorp/vault/pull/17768)] * auth: Returns invalid credentials for ldap, userpass and approle when wrong credentials are provided for existent users. This will only be used internally for implementing user lockout. [[GH-17104](https://github.com/hashicorp/vault/pull/17104)] -* core: Bump Go version to 1.20.1. +* core: Bump Go version to 1.20. * core: Vault version has been moved out of sdk and into main vault module. Plugins using sdk/useragent.String must instead use sdk/useragent.PluginString. [[GH-14229](https://github.com/hashicorp/vault/pull/14229)] * logging: Removed legacy environment variable for log format ('LOGXI_FORMAT'), should use 'VAULT_LOG_FORMAT' instead [[GH-17822](https://github.com/hashicorp/vault/pull/17822)] @@ -280,35 +19,20 @@ Plugins using sdk/useragent.String must instead use sdk/useragent.PluginString. * plugins: `GET /database/config/:name` endpoint now returns an additional `plugin_version` field in the response data. [[GH-16982](https://github.com/hashicorp/vault/pull/16982)] * plugins: `GET /sys/auth/:path/tune` and `GET /sys/mounts/:path/tune` endpoints may now return an additional `plugin_version` field in the response data if set. 
[[GH-17167](https://github.com/hashicorp/vault/pull/17167)] * plugins: `GET` for `/sys/auth`, `/sys/auth/:path`, `/sys/mounts`, and `/sys/mounts/:path` paths now return additional `plugin_version`, `running_plugin_version` and `running_sha256` fields in the response data for each mount. [[GH-17167](https://github.com/hashicorp/vault/pull/17167)] -* sdk: Remove version package, make useragent.String versionless. [[GH-19068](https://github.com/hashicorp/vault/pull/19068)] * secrets/aws: do not create leases for non-renewable/non-revocable STS credentials to reduce storage calls [[GH-15869](https://github.com/hashicorp/vault/pull/15869)] -* secrets/gcpkms: Updated plugin from v0.13.0 to v0.14.0 [[GH-19063](https://github.com/hashicorp/vault/pull/19063)] * sys/internal/inspect: Turns of this endpoint by default. A SIGHUP can now be used to reload the configs and turns this endpoint on. * ui: Upgrade Ember to version 4.4.0 [[GH-17086](https://github.com/hashicorp/vault/pull/17086)] FEATURES: -* **Azure Auth Managed Identities**: Allow any Azure resource that supports managed identities to authenticate with Vault [[GH-19077](https://github.com/hashicorp/vault/pull/19077)] -* **Azure Auth Rotate Root**: Add support for rotate root in Azure Auth engine [[GH-19077](https://github.com/hashicorp/vault/pull/19077)] -* **Event System (Alpha)**: Vault has a new opt-in experimental event system. Not yet suitable for production use. Events are currently only generated on writes to the KV secrets engine, but external plugins can also be updated to start generating events. [[GH-19194](https://github.com/hashicorp/vault/pull/19194)] -* **GCP Secrets Impersonated Account Support**: Add support for GCP service account impersonation, allowing callers to generate a GCP access token without requiring Vault to store or retrieve a GCP service account key for each role. [[GH-19018](https://github.com/hashicorp/vault/pull/19018)] -* **Kubernetes Secrets Engine UI**: Kubernetes is now available in the UI as a supported secrets engine. [[GH-17893](https://github.com/hashicorp/vault/pull/17893)] * **New PKI UI**: Add beta support for new and improved PKI UI [[GH-18842](https://github.com/hashicorp/vault/pull/18842)] -* **PKI Cross-Cluster Revocations**: Revocation information can now be -synchronized across primary and performance replica clusters offering -a unified CRL/OCSP view of revocations across cluster boundaries. [[GH-19196](https://github.com/hashicorp/vault/pull/19196)] * **Server UDS Listener**: Adding listener to Vault server to serve http request via unix domain socket [[GH-18227](https://github.com/hashicorp/vault/pull/18227)] * **Transit managed keys**: The transit secrets engine now supports configuring and using managed keys -* **User Lockout**: Adds support to configure the user-lockout behaviour for failed logins to prevent -brute force attacks for userpass, approle and ldap auth methods. [[GH-19230](https://github.com/hashicorp/vault/pull/19230)] -* **VMSS Flex Authentication**: Adds support for Virtual Machine Scale Set Flex Authentication [[GH-19077](https://github.com/hashicorp/vault/pull/19077)] -* **Namespaces (enterprise)**: Added the ability to allow access to secrets and more to be shared across namespaces that do not share a namespace hierarchy. Using the new `sys/config/group-policy-application` API, policies can be configured to apply outside of namespace hierarchy, allowing this kind of cross-namespace sharing. 
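The `plugin_version` fields mentioned in the two plugin entries above can be inspected from the CLI. A minimal sketch, not part of this patch; the mount paths (`secret/`, `approle/`) and the use of `jq` are assumptions for illustration only:

```shell
# Read the tune output for a secrets mount and an auth mount; per the
# changelog entries above, plugin_version appears in .data when set.
vault read -format=json sys/mounts/secret/tune | jq '.data.plugin_version'
vault read -format=json sys/auth/approle/tune | jq '.data.plugin_version'

# The full mount listing also carries running_plugin_version and running_sha256.
vault secrets list -detailed -format=json \
  | jq '."secret/" | {plugin_version, running_plugin_version, running_sha256}'
```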
-* **OpenAPI-based Go & .NET Client Libraries (Beta)**: We have now made available two new [[OpenAPI-based Go](https://github.com/hashicorp/vault-client-go/)] & [[OpenAPI-based .NET](https://github.com/hashicorp/vault-client-dotnet/)] Client libraries (beta). You can use them to perform various secret management operations easily from your applications. +* ui: Adds Kubernetes secrets engine [[GH-17893](https://github.com/hashicorp/vault/pull/17893)] IMPROVEMENTS: * **Redis ElastiCache DB Engine**: Renamed configuration parameters for disambiguation; old parameters still supported for compatibility. [[GH-18752](https://github.com/hashicorp/vault/pull/18752)] -* Bump github.com/hashicorp/go-plugin version from 1.4.5 to 1.4.8 [[GH-19100](https://github.com/hashicorp/vault/pull/19100)] * Reduced binary size [[GH-17678](https://github.com/hashicorp/vault/pull/17678)] * agent/config: Allow config directories to be specified with -config, and allow multiple -configs to be supplied. [[GH-18403](https://github.com/hashicorp/vault/pull/18403)] * agent: Add note in logs when starting Vault Agent indicating if the version differs to the Vault Server. [[GH-18684](https://github.com/hashicorp/vault/pull/18684)] @@ -327,21 +51,17 @@ Virtual Machine Scale Set (VMSS) in flexible orchestration mode. [[GH-17540](htt * auth/azure: upgrades dependencies [[GH-17857](https://github.com/hashicorp/vault/pull/17857)] * auth/cert: Add configurable support for validating client certs with OCSP. [[GH-17093](https://github.com/hashicorp/vault/pull/17093)] * auth/cert: Support listing provisioned CRLs within the mount. [[GH-18043](https://github.com/hashicorp/vault/pull/18043)] -* auth/cf: Remove incorrect usage of CreateOperation from path_config [[GH-19098](https://github.com/hashicorp/vault/pull/19098)] * auth/gcp: Upgrades dependencies [[GH-17858](https://github.com/hashicorp/vault/pull/17858)] -* auth/oidc: Adds `abort_on_error` parameter to CLI login command to help in non-interactive contexts [[GH-19076](https://github.com/hashicorp/vault/pull/19076)] -* auth/oidc: Adds ability to set Google Workspace domain for groups search [[GH-19076](https://github.com/hashicorp/vault/pull/19076)] * auth/token (enterprise): Allow batch token creation in perfStandby nodes * auth: Allow naming login MFA methods and using those names instead of IDs in satisfying MFA requirement for requests. Make passcode arguments consistent across login MFA method types. [[GH-18610](https://github.com/hashicorp/vault/pull/18610)] * auth: Provide an IP address of the requests from Vault to a Duo challenge after successful authentication. [[GH-18811](https://github.com/hashicorp/vault/pull/18811)] +* autopilot: Update version to v.0.2.0 to add better support for respecting min quorum [[GH-17848](https://github.com/hashicorp/vault/pull/17848)] * autopilot: Update version to v.0.2.0 to add better support for respecting min quorum * cli/kv: improve kv CLI to remove data or custom metadata using kv patch [[GH-18067](https://github.com/hashicorp/vault/pull/18067)] * cli/pki: Add List-Intermediates functionality to pki client. [[GH-18463](https://github.com/hashicorp/vault/pull/18463)] * cli/pki: Add health-check subcommand to evaluate the health of a PKI instance. [[GH-17750](https://github.com/hashicorp/vault/pull/17750)] * cli/pki: Add pki issue command, which creates a CSR, has a vault mount sign it, then reimports it. 
[[GH-18467](https://github.com/hashicorp/vault/pull/18467)] -* cli/pki: Added "Reissue" command which allows extracting fields from an existing certificate to create a new certificate. [[GH-18499](https://github.com/hashicorp/vault/pull/18499)] -* cli/pki: Change the pki health-check --list default config output to JSON so it's a usable configuration file [[GH-19269](https://github.com/hashicorp/vault/pull/19269)] * cli: Add support for creating requests to existing non-KVv2 PATCH-capable endpoints. [[GH-17650](https://github.com/hashicorp/vault/pull/17650)] * cli: Add transit import key helper commands for BYOK to Transit/Transform. [[GH-18887](https://github.com/hashicorp/vault/pull/18887)] * cli: Support the -format=raw option, to read non-JSON Vault endpoints and original response bodies. [[GH-14945](https://github.com/hashicorp/vault/pull/14945)] @@ -369,7 +89,6 @@ with given mount_accessor and alias_identifier if locked [[GH-18279](https://git * core: added changes for user lockout workflow. [[GH-17951](https://github.com/hashicorp/vault/pull/17951)] * core: parallelize backend initialization to improve startup time for large numbers of mounts. [[GH-18244](https://github.com/hashicorp/vault/pull/18244)] * database/postgres: Support multiline strings for revocation statements. [[GH-18632](https://github.com/hashicorp/vault/pull/18632)] -* database/redis-elasticache: changed config argument names for disambiguation [[GH-19044](https://github.com/hashicorp/vault/pull/19044)] * database/snowflake: Allow parallel requests to Snowflake [[GH-17593](https://github.com/hashicorp/vault/pull/17593)] * hcp/connectivity: Add foundational OSS support for opt-in secure communication between self-managed Vault nodes and [HashiCorp Cloud Platform](https://cloud.hashicorp.com) [[GH-18228](https://github.com/hashicorp/vault/pull/18228)] * hcp/connectivity: Include HCP organization, project, and resource ID in server startup logs [[GH-18315](https://github.com/hashicorp/vault/pull/18315)] @@ -399,14 +118,10 @@ deactivated state if this is not the first unseal after an upgrade. [[GH-17879]( * sdk: Add response schema validation method framework/FieldData.ValidateStrict and two test helpers (ValidateResponse, ValidateResponseData) [[GH-18635](https://github.com/hashicorp/vault/pull/18635)] * sdk: Adding FindResponseSchema test helper to assist with response schema validation in tests [[GH-18636](https://github.com/hashicorp/vault/pull/18636)] * secrets/aws: Update dependencies [[PR-17747](https://github.com/hashicorp/vault/pull/17747)] [[GH-17747](https://github.com/hashicorp/vault/pull/17747)] -* secrets/azure: Adds ability to persist an application for the lifetime of a role. 
[[GH-19096](https://github.com/hashicorp/vault/pull/19096)] * secrets/azure: upgrades dependencies [[GH-17964](https://github.com/hashicorp/vault/pull/17964)] * secrets/db/mysql: Add `tls_server_name` and `tls_skip_verify` parameters [[GH-18799](https://github.com/hashicorp/vault/pull/18799)] * secrets/gcp: Upgrades dependencies [[GH-17871](https://github.com/hashicorp/vault/pull/17871)] * secrets/kubernetes: Add /check endpoint to determine if environment variables are set [[GH-18](https://github.com/hashicorp/vault-plugin-secrets-kubernetes/pull/18)] [[GH-18587](https://github.com/hashicorp/vault/pull/18587)] -* secrets/kubernetes: add /check endpoint to determine if environment variables are set [[GH-19084](https://github.com/hashicorp/vault/pull/19084)] -* secrets/kv: Emit events on write if events system enabled [[GH-19145](https://github.com/hashicorp/vault/pull/19145)] -* secrets/kv: make upgrade synchronous when no keys to upgrade [[GH-19056](https://github.com/hashicorp/vault/pull/19056)] * secrets/kv: new KVv2 mounts and KVv1 mounts without any keys will upgrade synchronously, allowing for instant use [[GH-17406](https://github.com/hashicorp/vault/pull/17406)] * secrets/pki: Add a new API that returns the serial numbers of revoked certificates on the local cluster [[GH-17779](https://github.com/hashicorp/vault/pull/17779)] * secrets/pki: Add support to specify signature bits when generating CSRs through intermediate/generate apis [[GH-17388](https://github.com/hashicorp/vault/pull/17388)] @@ -437,43 +152,29 @@ which is repeated on batch responses to help more easily correlate inputs with o * ui: Enable typescript for future development [[GH-17927](https://github.com/hashicorp/vault/pull/17927)] * ui: Prepends "passcode=" if not provided in user input for duo totp mfa method authentication [[GH-18342](https://github.com/hashicorp/vault/pull/18342)] * ui: Update language on database role to "Connection name" [[GH-18261](https://github.com/hashicorp/vault/issues/18261)] [[GH-18350](https://github.com/hashicorp/vault/pull/18350)] -* ui: adds allowed_response_headers as param for secret engine mount config [[GH-19216](https://github.com/hashicorp/vault/pull/19216)] * ui: consolidate all tag usage [[GH-17866](https://github.com/hashicorp/vault/pull/17866)] * ui: mfa: use proper request id generation [[GH-17835](https://github.com/hashicorp/vault/pull/17835)] -* ui: remove wizard [[GH-19220](https://github.com/hashicorp/vault/pull/19220)] * ui: update DocLink component to use new host url: developer.hashicorp.com [[GH-18374](https://github.com/hashicorp/vault/pull/18374)] * ui: update TTL picker for consistency [[GH-18114](https://github.com/hashicorp/vault/pull/18114)] * ui: use the combined activity log (partial + historic) API for client count dashboard and remove use of monthly endpoint [[GH-17575](https://github.com/hashicorp/vault/pull/17575)] * vault/diagnose: Upgrade `go.opentelemetry.io/otel`, `go.opentelemetry.io/otel/sdk`, `go.opentelemetry.io/otel/trace` to v1.11.2 [[GH-18589](https://github.com/hashicorp/vault/pull/18589)] -DEPRECATIONS: - -* secrets/ad: Marks the Active Directory (AD) secrets engine as deprecated. 
[[GH-19334](https://github.com/hashicorp/vault/pull/19334)] - BUG FIXES: * api: Remove timeout logic from ReadRaw functions and add ReadRawWithContext [[GH-18708](https://github.com/hashicorp/vault/pull/18708)] * auth/alicloud: fix regression in vault login command that caused login to fail [[GH-19005](https://github.com/hashicorp/vault/pull/19005)] -* auth/approle: Add nil check for the secret ID entry when deleting via secret id accessor preventing cross role secret id deletion [[GH-19186](https://github.com/hashicorp/vault/pull/19186)] * auth/approle: Fix `token_bound_cidrs` validation when using /32 blocks for role and secret ID [[GH-18145](https://github.com/hashicorp/vault/pull/18145)] * auth/cert: Address a race condition accessing the loaded crls without a lock [[GH-18945](https://github.com/hashicorp/vault/pull/18945)] * auth/kubernetes: Ensure a consistent TLS configuration for all k8s API requests [[#173](https://github.com/hashicorp/vault-plugin-auth-kubernetes/pull/173)] [[GH-18716](https://github.com/hashicorp/vault/pull/18716)] -* auth/kubernetes: fixes and dep updates for the auth-kubernetes plugin (see plugin changelog for details) [[GH-19094](https://github.com/hashicorp/vault/pull/19094)] * auth/okta: fix a panic for AuthRenew in Okta [[GH-18011](https://github.com/hashicorp/vault/pull/18011)] * auth: Deduplicate policies prior to ACL generation [[GH-17914](https://github.com/hashicorp/vault/pull/17914)] * cli/kv: skip formatting of nil secrets for patch and put with field parameter set [[GH-18163](https://github.com/hashicorp/vault/pull/18163)] -* cli/pki: Decode integer values properly in health-check configuration file [[GH-19265](https://github.com/hashicorp/vault/pull/19265)] -* cli/pki: Fix path for role health-check warning messages [[GH-19274](https://github.com/hashicorp/vault/pull/19274)] -* cli/pki: Properly report permission issues within health-check mount tune checks [[GH-19276](https://github.com/hashicorp/vault/pull/19276)] -* cli/transit: Fix import, import-version command invocation [[GH-19373](https://github.com/hashicorp/vault/pull/19373)] * cli: Fix issue preventing kv commands from executing properly when the mount path provided by `-mount` flag and secret key path are the same. [[GH-17679](https://github.com/hashicorp/vault/pull/17679)] * cli: Fix vault read handling to return raw data as secret.Data when there is no top-level data object from api response. [[GH-17913](https://github.com/hashicorp/vault/pull/17913)] * cli: Remove empty table heading for `vault secrets list -detailed` output. [[GH-17577](https://github.com/hashicorp/vault/pull/17577)] * command/namespace: Fix vault cli namespace patch examples in help text. [[GH-18143](https://github.com/hashicorp/vault/pull/18143)] * core (enterprise): Fix missing quotation mark in error message * core (enterprise): Fix panic that could occur with SSCT alongside invoking external plugins for revocation. -* core (enterprise): Fix panic when using invalid accessor for control-group request -* core (enterprise): Fix perf standby WAL streaming silently failures when replication setup happens at a bad time. * core (enterprise): Supported storage check in `vault server` command will no longer prevent startup. Instead, a warning will be logged if configured to use storage backend other than `raft` or `consul`. * core/activity: add namespace breakdown for new clients when date range spans multiple months, including the current month. 
[[GH-18766](https://github.com/hashicorp/vault/pull/18766)] * core/activity: de-duplicate namespaces when historical and current month data are mixed [[GH-18452](https://github.com/hashicorp/vault/pull/18452)] @@ -494,8 +195,6 @@ BUG FIXES: * core: Fix spurious `permission denied` for all HelpOperations on sudo-protected paths [[GH-18568](https://github.com/hashicorp/vault/pull/18568)] * core: Fix vault operator init command to show the right curl string with -output-curl-string and right policy hcl with -output-policy [[GH-17514](https://github.com/hashicorp/vault/pull/17514)] * core: Fixes spurious warnings being emitted relating to "unknown or unsupported fields" for JSON config [[GH-17660](https://github.com/hashicorp/vault/pull/17660)] -* core: Linux packages now have vendor label and set the default label to HashiCorp. -This fix is implemented for any future releases, but will not be updated for historical releases. * core: Prevent panics in `sys/leases/lookup`, `sys/leases/revoke`, and `sys/leases/renew` endpoints if provided `lease_id` is null [[GH-18951](https://github.com/hashicorp/vault/pull/18951)] * core: Refactor lock grabbing code to simplify stateLock deadlock investigations [[GH-17187](https://github.com/hashicorp/vault/pull/17187)] * core: fix GPG encryption to support subkeys. [[GH-16224](https://github.com/hashicorp/vault/pull/16224)] @@ -510,9 +209,6 @@ mount loop if default policies are not yet synced from the active node. [[GH-178 * credential/cert: adds error message if no tls connection is found during the AliasLookahead operation [[GH-17904](https://github.com/hashicorp/vault/pull/17904)] * database/mongodb: Fix writeConcern set to be applied to any query made on the database [[GH-18546](https://github.com/hashicorp/vault/pull/18546)] * expiration: Prevent panics on perf standbys when an irrevocable lease gets deleted. [[GH-18401](https://github.com/hashicorp/vault/pull/18401)] -* kmip (enterprise): Fix a problem with some multi-part MAC Verify operations. -* kmip (enterprise): Only require data to be full blocks on encrypt/decrypt operations using CBC and ECB block cipher modes. -* license (enterprise): Fix bug where license would update even if the license didn't change. * licensing (enterprise): update autoloaded license cache after reload * login: Store token in tokenhelper for interactive login MFA [[GH-17040](https://github.com/hashicorp/vault/pull/17040)] * openapi: Fix many incorrect details in generated API spec, by using better techniques to parse path regexps [[GH-18554](https://github.com/hashicorp/vault/pull/18554)] @@ -524,13 +220,8 @@ mount loop if default policies are not yet synced from the active node. [[GH-178 * plugins: Only report deprecation status for builtin plugins. [[GH-17816](https://github.com/hashicorp/vault/pull/17816)] * plugins: Skip loading but still mount data associated with missing plugins on unseal. [[GH-18189](https://github.com/hashicorp/vault/pull/18189)] * plugins: Vault upgrades will no longer fail if a mount has been created using an explicit builtin plugin version. [[GH-18051](https://github.com/hashicorp/vault/pull/18051)] -* replication (enterprise): Fix bug where reloading external plugin on a secondary would -break replication. * sdk: Don't panic if system view or storage methods called during plugin setup. 
[[GH-18210](https://github.com/hashicorp/vault/pull/18210)] * secret/pki: fix bug with initial legacy bundle migration (from < 1.11 into 1.11+) and missing issuers from ca_chain [[GH-17772](https://github.com/hashicorp/vault/pull/17772)] -* secrets/ad: Fix bug where updates to config would fail if password isn't provided [[GH-19061](https://github.com/hashicorp/vault/pull/19061)] -* secrets/gcp: fix issue where IAM bindings were not preserved during policy update [[GH-19018](https://github.com/hashicorp/vault/pull/19018)] -* secrets/mongodb-atlas: Fix a bug that did not allow WAL rollback to handle partial failures when creating API keys [[GH-19111](https://github.com/hashicorp/vault/pull/19111)] * secrets/pki: Address nil panic when an empty POST request is sent to the OCSP handler [[GH-18184](https://github.com/hashicorp/vault/pull/18184)] * secrets/pki: Allow patching issuer to set an empty issuer name. [[GH-18466](https://github.com/hashicorp/vault/pull/18466)] * secrets/pki: Do not read revoked certificates from backend when CRL is disabled [[GH-17385](https://github.com/hashicorp/vault/pull/17385)] @@ -538,172 +229,23 @@ break replication. * secrets/pki: Fixes duplicate otherName in certificates created by the sign-verbatim endpoint. [[GH-16700](https://github.com/hashicorp/vault/pull/16700)] * secrets/pki: OCSP GET request parameter was not being URL unescaped before processing. [[GH-18938](https://github.com/hashicorp/vault/pull/18938)] * secrets/pki: Respond to tidy-status, tidy-cancel on PR Secondary clusters. [[GH-17497](https://github.com/hashicorp/vault/pull/17497)] -* secrets/pki: Revert fix for PR [18938](https://github.com/hashicorp/vault/pull/18938) [[GH-19037](https://github.com/hashicorp/vault/pull/19037)] * secrets/pki: consistently use UTC for CA's notAfter exceeded error message [[GH-18984](https://github.com/hashicorp/vault/pull/18984)] * secrets/pki: fix race between tidy's cert counting and tidy status reporting. [[GH-18899](https://github.com/hashicorp/vault/pull/18899)] * secrets/transit: Do not warn about unrecognized parameter 'batch_input' [[GH-18299](https://github.com/hashicorp/vault/pull/18299)] * secrets/transit: Honor `partial_success_response_code` on decryption failures. [[GH-18310](https://github.com/hashicorp/vault/pull/18310)] -* server/config: Use file.Stat when checking file permissions when VAULT_ENABLE_FILE_PERMISSIONS_CHECK is enabled [[GH-19311](https://github.com/hashicorp/vault/pull/19311)] * storage/raft (enterprise): An already joined node can rejoin by wiping storage and re-issueing a join request, but in doing so could transiently become a non-voter. In some scenarios this resulted in loss of quorum. [[GH-18263](https://github.com/hashicorp/vault/pull/18263)] * storage/raft: Don't panic on unknown raft ops [[GH-17732](https://github.com/hashicorp/vault/pull/17732)] * storage/raft: Fix race with follower heartbeat tracker during teardown. [[GH-18704](https://github.com/hashicorp/vault/pull/18704)] * ui/keymgmt: Sets the defaultValue for type when creating a key. 
[[GH-17407](https://github.com/hashicorp/vault/pull/17407)] -* ui: Fix bug where logging in via OIDC fails if browser is in fullscreen mode [[GH-19071](https://github.com/hashicorp/vault/pull/19071)] * ui: Fixes issue with not being able to download raft snapshot via service worker [[GH-17769](https://github.com/hashicorp/vault/pull/17769)] * ui: Fixes oidc/jwt login issue with alternate mount path and jwt login via mount path tab [[GH-17661](https://github.com/hashicorp/vault/pull/17661)] -* ui: Remove `default` and add `default-service` and `default-batch` to UI token_type for auth mount and tuning. [[GH-19290](https://github.com/hashicorp/vault/pull/19290)] * ui: Remove default value of 30 to TtlPicker2 if no value is passed in. [[GH-17376](https://github.com/hashicorp/vault/pull/17376)] -* ui: allow selection of "default" for ssh algorithm_signer in web interface [[GH-17894](https://github.com/hashicorp/vault/pull/17894)] * ui: cleanup unsaved auth method ember data record when navigating away from mount backend form [[GH-18651](https://github.com/hashicorp/vault/pull/18651)] * ui: fix entity policies list link to policy show page [[GH-17950](https://github.com/hashicorp/vault/pull/17950)] * ui: fixes query parameters not passed in api explorer test requests [[GH-18743](https://github.com/hashicorp/vault/pull/18743)] -* ui: fixes reliance on secure context (https) by removing methods using the Crypto interface [[GH-19403](https://github.com/hashicorp/vault/pull/19403)] -* ui: show Get credentials button for static roles detail page when a user has the proper permissions. [[GH-19190](https://github.com/hashicorp/vault/pull/19190)] - -## 1.12.7 -### June 08, 2023 - -CHANGES: - -* core: Bump Go version to 1.19.9. -* core: Revert #19676 (VAULT_GRPC_MIN_CONNECT_TIMEOUT env var) as we decided it was unnecessary. [[GH-20826](https://github.com/hashicorp/vault/pull/20826)] - -IMPROVEMENTS: - -* audit: add a `mount_point` field to audit requests and response entries [[GH-20411](https://github.com/hashicorp/vault/pull/20411)] -* command/server: Add support for dumping pprof files to the filesystem via SIGUSR2 when -`VAULT_PPROF_WRITE_TO_FILE=true` is set on the server. [[GH-20609](https://github.com/hashicorp/vault/pull/20609)] -* core: include namespace path in granting_policies block of audit log -* openapi: Fix generated types for duration strings [[GH-20841](https://github.com/hashicorp/vault/pull/20841)] -* sdk/framework: Fix non-deterministic ordering of 'required' fields in OpenAPI spec [[GH-20881](https://github.com/hashicorp/vault/pull/20881)] -* secrets/pki: add subject key identifier to read key response [[GH-20642](https://github.com/hashicorp/vault/pull/20642)] -* ui: update TTL picker for consistency [[GH-18114](https://github.com/hashicorp/vault/pull/18114)] - -BUG FIXES: - -* api: Properly Handle nil identity_policies in Secret Data [[GH-20636](https://github.com/hashicorp/vault/pull/20636)] -* auth/ldap: Set default value for `max_page_size` properly [[GH-20453](https://github.com/hashicorp/vault/pull/20453)] -* cli: CLI should take days as a unit of time for ttl like flags [[GH-20477](https://github.com/hashicorp/vault/pull/20477)] -* cli: disable printing flags warnings messages for the ssh command [[GH-20502](https://github.com/hashicorp/vault/pull/20502)] -* core (enterprise): Fix log shipper buffer size overflow issue for 32 bit architecture. -* core (enterprise): Fix logshipper buffer size to default to DefaultBufferSize only when reported system memory is zero. 
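As a usage sketch for the SIGUSR2 pprof improvement noted above (not part of this patch): the config path and the process-matching pattern below are placeholders, and the on-disk location of the pprof output is whatever the server chooses.

```shell
# Start a server with the opt-in env var set, then signal it to dump
# pprof files to the filesystem.
VAULT_PPROF_WRITE_TO_FILE=true vault server -config=/etc/vault.d/vault.hcl &
pkill -USR2 -x vault
```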
-* core (enterprise): Remove MFA Enforcment configuration for namespace when deleting namespace -* core: prevent panic on login after namespace is deleted that had mfa enforcement [[GH-20375](https://github.com/hashicorp/vault/pull/20375)] -* replication (enterprise): Fix a race condition with invalid tokens during WAL streaming that was causing Secondary clusters to be unable to connect to a Primary. -* replication (enterprise): fix bug where secondary grpc connections would timeout when connecting to a primary host that no longer exists. -* secrets/transform (enterprise): Fix a caching bug affecting secondary nodes after a tokenization key rotation -* secrets/transit: Fix export of HMAC-only key, correctly exporting the key used for sign operations. For consumers of the previously incorrect key, use the plaintext export to retrieve these incorrect keys and import them as new versions. -secrets/transit: Fix bug related to shorter dedicated HMAC key sizing. -sdk/helper/keysutil: New HMAC type policies will have HMACKey equal to Key and be copied over on import. [[GH-20864](https://github.com/hashicorp/vault/pull/20864)] -* ui: Fixes issue unsealing cluster for seal types other than shamir [[GH-20897](https://github.com/hashicorp/vault/pull/20897)] - -## 1.12.6 -### April 26, 2023 - -CHANGES: - -* core: Bump Go version to 1.19.8. - -IMPROVEMENTS: - -* cli/namespace: Add detailed flag to output additional namespace information -such as namespace IDs and custom metadata. [[GH-20243](https://github.com/hashicorp/vault/pull/20243)] -* core/activity: add an endpoint to write test activity log data, guarded by a build flag [[GH-20019](https://github.com/hashicorp/vault/pull/20019)] -* core: Add a `raft` sub-field to the `storage` and `ha_storage` details provided by the -`/sys/config/state/sanitized` endpoint in order to include the `max_entry_size`. [[GH-20044](https://github.com/hashicorp/vault/pull/20044)] -* sdk/ldaputil: added `connection_timeout` to tune connection timeout duration -for all LDAP plugins. [[GH-20144](https://github.com/hashicorp/vault/pull/20144)] -* secrets/pki: Decrease size and improve compatibility of OCSP responses by removing issuer certificate. [[GH-20201](https://github.com/hashicorp/vault/pull/20201)] - -BUG FIXES: - -* auth/ldap: Add max_page_size configurable to LDAP configuration [[GH-19032](https://github.com/hashicorp/vault/pull/19032)] -* command/server: Fix incorrect paths in generated config for `-dev-tls` flag on Windows [[GH-20257](https://github.com/hashicorp/vault/pull/20257)] -* core (enterprise): Fix intermittent issue with token entries sometimes not being found when using a newly created token in a request to a secondary, even when SSCT `new_token` forwarding is set. When this occurred, this would result in the following error to the client: `error performing token check: no lease entry found for token that ought to have one, possible eventual consistency issue`. -* core (enterprise): Fix read on perf standbys failing with 412 after leadership change, unseal, restores or restarts when no writes occur -* core/ssct (enterprise): Fixed race condition where a newly promoted DR may revert `sscGenCounter` -resulting in 412 errors. -* core: Fix regression breaking non-raft clusters whose nodes share the same cluster_addr/api_addr. 
[[GH-19721](https://github.com/hashicorp/vault/pull/19721)] -* helper/random: Fix race condition in string generator helper [[GH-19875](https://github.com/hashicorp/vault/pull/19875)] -* kmip (enterprise): Fix a problem decrypting with keys that have no Process Start Date attribute. -* openapi: Fix many incorrect details in generated API spec, by using better techniques to parse path regexps [[GH-18554](https://github.com/hashicorp/vault/pull/18554)] -* pki: Fix automatically turning off CRL signing on upgrade to Vault >= 1.12, if CA Key Usage disallows it [[GH-20220](https://github.com/hashicorp/vault/pull/20220)] -* replication (enterprise): Fix a caching issue when replicating filtered data to -a performance secondary. This resulted in the data being set to nil in the cache -and a "invalid value" error being returned from the API. -* replication (enterprise): Fix replication status for Primary clusters showing its primary cluster's information (in case of DR) in secondaries field when known_secondaries field is nil -* secrets/pki: Fix patching of leaf_not_after_behavior on issuers. [[GH-20341](https://github.com/hashicorp/vault/pull/20341)] -* secrets/transform (enterprise): Address SQL connection leak when cleaning expired tokens -* ui: Fix OIDC provider logo showing when domain doesn't match [[GH-20263](https://github.com/hashicorp/vault/pull/20263)] -* ui: Fix bad link to namespace when namespace name includes `.` [[GH-19799](https://github.com/hashicorp/vault/pull/19799)] -* ui: fixes browser console formatting for help command output [[GH-20064](https://github.com/hashicorp/vault/pull/20064)] -* ui: remove use of htmlSafe except when first sanitized [[GH-20235](https://github.com/hashicorp/vault/pull/20235)] -## 1.12.5 -### March 29, 2023 - -SECURITY: - -* storage/mssql: When using Vault’s community-supported Microsoft SQL (MSSQL) database storage backend, a privileged attacker with the ability to write arbitrary data to Vault’s configuration may be able to perform arbitrary SQL commands on the underlying database server through Vault. This vulnerability, CVE-2023-0620, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-12](https://discuss.hashicorp.com/t/hcsec-2023-12-vault-s-microsoft-sql-database-storage-backend-vulnerable-to-sql-injection-via-configuration-file/52080)] -* secrets/pki: Vault’s PKI mount issuer endpoints did not correctly authorize access to remove an issuer or modify issuer metadata, potentially resulting in denial of service of the PKI mount. This bug did not affect public or private key material, trust chains or certificate issuance. This vulnerability, CVE-2023-0665, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-11](https://discuss.hashicorp.com/t/hcsec-2023-11-vault-s-pki-issuer-endpoint-did-not-correctly-authorize-access-to-issuer-metadata/52079)] -* core: HashiCorp Vault’s implementation of Shamir’s secret sharing used precomputed table lookups, and was vulnerable to cache-timing attacks. An attacker with access to, and the ability to observe a large number of unseal operations on the host through a side channel may reduce the search space of a brute force effort to recover the Shamir shares. This vulnerability, CVE-2023-25000, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. 
[[HCSEC-2023-10](https://discuss.hashicorp.com/t/hcsec-2023-10-vault-vulnerable-to-cache-timing-attacks-during-seal-and-unseal-operations/52078)] - -IMPROVEMENTS: - -* auth/github: Allow for an optional Github auth token environment variable to make authenticated requests when fetching org id -website/docs: Add docs for `VAULT_AUTH_CONFIG_GITHUB_TOKEN` environment variable when writing Github config [[GH-19244](https://github.com/hashicorp/vault/pull/19244)] -* core: Allow overriding gRPC connect timeout via VAULT_GRPC_MIN_CONNECT_TIMEOUT. This is an env var rather than a config setting because we don't expect this to ever be needed. It's being added as a last-ditch -option in case all else fails for some replication issues we may not have fully reproduced. [[GH-19676](https://github.com/hashicorp/vault/pull/19676)] -* core: validate name identifiers in mssql physical storage backend prior use [[GH-19591](https://github.com/hashicorp/vault/pull/19591)] - -BUG FIXES: - -* cli: Fix vault read handling to return raw data as secret.Data when there is no top-level data object from api response. [[GH-17913](https://github.com/hashicorp/vault/pull/17913)] -* core (enterprise): Attempt to reconnect to a PKCS#11 HSM if we retrieve a CKR_FUNCTION_FAILED error. -* core: Fixed issue with remounting mounts that have a non-trailing space in the 'to' or 'from' paths. [[GH-19585](https://github.com/hashicorp/vault/pull/19585)] -* kmip (enterprise): Do not require attribute Cryptographic Usage Mask when registering Secret Data managed objects. -* kmip (enterprise): Fix a problem forwarding some requests to the active node. -* openapi: Fix logic for labeling unauthenticated/sudo paths. [[GH-19600](https://github.com/hashicorp/vault/pull/19600)] -* secrets/ldap: Invalidates WAL entry for static role if `password_policy` has changed. [[GH-19641](https://github.com/hashicorp/vault/pull/19641)] -* secrets/transform (enterprise): Fix persistence problem with rotated tokenization key versions -* ui: fixes issue navigating back a level using the breadcrumb from secret metadata view [[GH-19703](https://github.com/hashicorp/vault/pull/19703)] -* ui: pass encodeBase64 param to HMAC transit-key-actions. [[GH-19429](https://github.com/hashicorp/vault/pull/19429)] -* ui: use URLSearchParams interface to capture namespace param from SSOs (ex. ADFS) with decoded state param in callback url [[GH-19460](https://github.com/hashicorp/vault/pull/19460)] - -## 1.12.4 -### March 01, 2023 - -SECURITY: -* auth/approle: When using the Vault and Vault Enterprise (Vault) approle auth method, any authenticated user with access to the /auth/approle/role/:role_name/secret-id-accessor/destroy endpoint can destroy the secret ID of any other role by providing the secret ID accessor. This vulnerability, CVE-2023-24999 has been fixed in Vault 1.13.0, 1.12.4, 1.11.8, 1.10.11 and above. [[HSEC-2023-07](https://discuss.hashicorp.com/t/hcsec-2023-07-vault-fails-to-verify-if-approle-secretid-belongs-to-role-during-a-destroy-operation/51305)] - -CHANGES: - -* core: Bump Go version to 1.19.6. - -IMPROVEMENTS: - -* secrets/database: Adds error message requiring password on root crednetial rotation. 
[[GH-19103](https://github.com/hashicorp/vault/pull/19103)] -* ui: remove wizard [[GH-19220](https://github.com/hashicorp/vault/pull/19220)] - -BUG FIXES: - -* auth/approle: Add nil check for the secret ID entry when deleting via secret id accessor preventing cross role secret id deletion [[GH-19186](https://github.com/hashicorp/vault/pull/19186)] -* core (enterprise): Fix panic when using invalid accessor for control-group request -* core (enterprise): Fix perf standby WAL streaming silently failures when replication setup happens at a bad time. -* core: Prevent panics in `sys/leases/lookup`, `sys/leases/revoke`, and `sys/leases/renew` endpoints if provided `lease_id` is null [[GH-18951](https://github.com/hashicorp/vault/pull/18951)] -* license (enterprise): Fix bug where license would update even if the license didn't change. -* replication (enterprise): Fix bug where reloading external plugin on a secondary would -break replication. -* secrets/ad: Fix bug where config couldn't be updated unless binddn/bindpass were included in the update. [[GH-18207](https://github.com/hashicorp/vault/pull/18207)] -* secrets/pki: Revert fix for PR [18938](https://github.com/hashicorp/vault/pull/18938) [[GH-19037](https://github.com/hashicorp/vault/pull/19037)] -* server/config: Use file.Stat when checking file permissions when VAULT_ENABLE_FILE_PERMISSIONS_CHECK is enabled [[GH-19311](https://github.com/hashicorp/vault/pull/19311)] -* ui (enterprise): Fix cancel button from transform engine role creation page [[GH-19135](https://github.com/hashicorp/vault/pull/19135)] -* ui: Fix bug where logging in via OIDC fails if browser is in fullscreen mode [[GH-19071](https://github.com/hashicorp/vault/pull/19071)] -* ui: fixes reliance on secure context (https) by removing methods using the Crypto interface [[GH-19410](https://github.com/hashicorp/vault/pull/19410)] -* ui: show Get credentials button for static roles detail page when a user has the proper permissions. [[GH-19190](https://github.com/hashicorp/vault/pull/19190)] - ## 1.12.3 ### February 6, 2023 @@ -741,7 +283,7 @@ BUG FIXES: * core: fix bug where context cancellations weren't forwarded to active node from performance standbys. * core: prevent panic in login mfa enforcement delete after enforcement's namespace is deleted [[GH-18923](https://github.com/hashicorp/vault/pull/18923)] * database/mongodb: Fix writeConcern set to be applied to any query made on the database [[GH-18546](https://github.com/hashicorp/vault/pull/18546)] * expiration: Prevent panics on perf standbys when an irrevocable lease gets deleted. [[GH-18401](https://github.com/hashicorp/vault/pull/18401)] * kmip (enterprise): Fix Destroy operation response that omitted Unique Identifier on some batched responses. * kmip (enterprise): Fix Locate operation response incompatibility with clients using KMIP versions prior to 1.3. * kmip (enterprise): Fix Query operation response that omitted streaming capability and supported profiles. @@ -822,10 +364,6 @@ BUG FIXES: ## 1.12.0 ### October 13, 2022 -SECURITY: - -* secrets/pki: Vault’s TLS certificate auth method did not initially load the optionally-configured CRL issued by the role’s CA into memory on startup, resulting in the revocation list not being checked, if the CRL has not yet been retrieved. This vulnerability, CVE-2022-41316, is fixed in Vault 1.12.0, 1.11.4, 1.10.7, and 1.9.10.
[[HSEC-2022-24](https://discuss.hashicorp.com/t/hcsec-2022-24-vaults-tls-cert-auth-method-only-loaded-crl-after-first-request/45483)] - CHANGES: * api: Exclusively use `GET /sys/plugins/catalog` endpoint for listing plugins, and add `details` field to list responses. [[GH-17347](https://github.com/hashicorp/vault/pull/17347)] @@ -1048,127 +586,6 @@ BUG FIXES: * ui: OIDC login type uses localStorage instead of sessionStorage [[GH-16170](https://github.com/hashicorp/vault/pull/16170)] * vault: Fix a bug where duplicate policies could be added to an identity group. [[GH-15638](https://github.com/hashicorp/vault/pull/15638)] -## 1.11.11 -### June 08, 2023 - -CHANGES: - -* core: Bump Go version to 1.19.9. -* core: Revert #19676 (VAULT_GRPC_MIN_CONNECT_TIMEOUT env var) as we decided it was unnecessary. [[GH-20826](https://github.com/hashicorp/vault/pull/20826)] - -IMPROVEMENTS: - -* command/server: Add support for dumping pprof files to the filesystem via SIGUSR2 when -`VAULT_PPROF_WRITE_TO_FILE=true` is set on the server. [[GH-20609](https://github.com/hashicorp/vault/pull/20609)] -* secrets/pki: add subject key identifier to read key response [[GH-20642](https://github.com/hashicorp/vault/pull/20642)] -* ui: update TTL picker for consistency [[GH-18114](https://github.com/hashicorp/vault/pull/18114)] - -BUG FIXES: - -* api: Properly Handle nil identity_policies in Secret Data [[GH-20636](https://github.com/hashicorp/vault/pull/20636)] -* auth/ldap: Set default value for `max_page_size` properly [[GH-20453](https://github.com/hashicorp/vault/pull/20453)] -* cli: CLI should take days as a unit of time for ttl like flags [[GH-20477](https://github.com/hashicorp/vault/pull/20477)] -* core (enterprise): Fix log shipper buffer size overflow issue for 32 bit architecture. -* core (enterprise): Fix logshipper buffer size to default to DefaultBufferSize only when reported system memory is zero. -* core (enterprise): Remove MFA Enforcment configuration for namespace when deleting namespace -* core: prevent panic on login after namespace is deleted that had mfa enforcement [[GH-20375](https://github.com/hashicorp/vault/pull/20375)] -* replication (enterprise): Fix a race condition with invalid tokens during WAL streaming that was causing Secondary clusters to be unable to connect to a Primary. -* replication (enterprise): fix bug where secondary grpc connections would timeout when connecting to a primary host that no longer exists. -* secrets/transform (enterprise): Fix a caching bug affecting secondary nodes after a tokenization key rotation - -## 1.11.10 -### April 26, 2023 - -CHANGES: - -* core: Bump Go version to 1.19.8. - -IMPROVEMENTS: - -* cli/namespace: Add detailed flag to output additional namespace information -such as namespace IDs and custom metadata. [[GH-20243](https://github.com/hashicorp/vault/pull/20243)] -* core/activity: add an endpoint to write test activity log data, guarded by a build flag [[GH-20019](https://github.com/hashicorp/vault/pull/20019)] -* core: Add a `raft` sub-field to the `storage` and `ha_storage` details provided by the -`/sys/config/state/sanitized` endpoint in order to include the `max_entry_size`. [[GH-20044](https://github.com/hashicorp/vault/pull/20044)] -* sdk/ldaputil: added `connection_timeout` to tune connection timeout duration -for all LDAP plugins. 
[[GH-20144](https://github.com/hashicorp/vault/pull/20144)] - -BUG FIXES: - -* auth/ldap: Add max_page_size configurable to LDAP configuration [[GH-19032](https://github.com/hashicorp/vault/pull/19032)] -* core (enterprise): Fix intermittent issue with token entries sometimes not being found when using a newly created token in a request to a secondary, even when SSCT `new_token` forwarding is set. When this occurred, this would result in the following error to the client: `error performing token check: no lease entry found for token that ought to have one, possible eventual consistency issue`. -* core (enterprise): Fix read on perf standbys failing with 412 after leadership change, unseal, restores or restarts when no writes occur -* core/ssct (enterprise): Fixed race condition where a newly promoted DR may revert `sscGenCounter` -resulting in 412 errors. -* core: Fix regression breaking non-raft clusters whose nodes share the same cluster_addr/api_addr. [[GH-19721](https://github.com/hashicorp/vault/pull/19721)] -* helper/random: Fix race condition in string generator helper [[GH-19875](https://github.com/hashicorp/vault/pull/19875)] -* openapi: Fix many incorrect details in generated API spec, by using better techniques to parse path regexps [[GH-18554](https://github.com/hashicorp/vault/pull/18554)] -* replication (enterprise): Fix replication status for Primary clusters showing its primary cluster's information (in case of DR) in secondaries field when known_secondaries field is nil -* secrets/pki: Fix patching of leaf_not_after_behavior on issuers. [[GH-20341](https://github.com/hashicorp/vault/pull/20341)] -* secrets/transform (enterprise): Address SQL connection leak when cleaning expired tokens -* ui: Fix OIDC provider logo showing when domain doesn't match [[GH-20263](https://github.com/hashicorp/vault/pull/20263)] -* ui: Fix bad link to namespace when namespace name includes `.` [[GH-19799](https://github.com/hashicorp/vault/pull/19799)] -* ui: fixes browser console formatting for help command output [[GH-20064](https://github.com/hashicorp/vault/pull/20064)] -* ui: remove use of htmlSafe except when first sanitized [[GH-20235](https://github.com/hashicorp/vault/pull/20235)] - -## 1.11.9 -### March 29, 2023 - -SECURITY: - -* storage/mssql: When using Vault’s community-supported Microsoft SQL (MSSQL) database storage backend, a privileged attacker with the ability to write arbitrary data to Vault’s configuration may be able to perform arbitrary SQL commands on the underlying database server through Vault. This vulnerability, CVE-2023-0620, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-12](https://discuss.hashicorp.com/t/hcsec-2023-12-vault-s-microsoft-sql-database-storage-backend-vulnerable-to-sql-injection-via-configuration-file/52080)] -* secrets/pki: Vault’s PKI mount issuer endpoints did not correctly authorize access to remove an issuer or modify issuer metadata, potentially resulting in denial of service of the PKI mount. This bug did not affect public or private key material, trust chains or certificate issuance. This vulnerability, CVE-2023-0665, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-11](https://discuss.hashicorp.com/t/hcsec-2023-11-vault-s-pki-issuer-endpoint-did-not-correctly-authorize-access-to-issuer-metadata/52079)] -* core: HashiCorp Vault’s implementation of Shamir’s secret sharing used precomputed table lookups, and was vulnerable to cache-timing attacks. 
An attacker with access to, and the ability to observe a large number of unseal operations on the host through a side channel may reduce the search space of a brute force effort to recover the Shamir shares. This vulnerability, CVE-2023-25000, is fixed in Vault 1.13.1, 1.12.5, and 1.11.9. [[HCSEC-2023-10](https://discuss.hashicorp.com/t/hcsec-2023-10-vault-vulnerable-to-cache-timing-attacks-during-seal-and-unseal-operations/52078)] - -IMPROVEMENTS: - -* auth/github: Allow for an optional Github auth token environment variable to make authenticated requests when fetching org id -website/docs: Add docs for `VAULT_AUTH_CONFIG_GITHUB_TOKEN` environment variable when writing Github config [[GH-19244](https://github.com/hashicorp/vault/pull/19244)] -* core: Allow overriding gRPC connect timeout via VAULT_GRPC_MIN_CONNECT_TIMEOUT. This is an env var rather than a config setting because we don't expect this to ever be needed. It's being added as a last-ditch -option in case all else fails for some replication issues we may not have fully reproduced. [[GH-19676](https://github.com/hashicorp/vault/pull/19676)] -* core: validate name identifiers in mssql physical storage backend prior use [[GH-19591](https://github.com/hashicorp/vault/pull/19591)] - -BUG FIXES: - -* auth/kubernetes: Ensure a consistent TLS configuration for all k8s API requests [[#190](https://github.com/hashicorp/vault-plugin-auth-kubernetes/pull/190)] [[GH-19720](https://github.com/hashicorp/vault/pull/19720)] -* cli: Fix vault read handling to return raw data as secret.Data when there is no top-level data object from api response. [[GH-17913](https://github.com/hashicorp/vault/pull/17913)] -* core (enterprise): Attempt to reconnect to a PKCS#11 HSM if we retrieve a CKR_FUNCTION_FAILED error. -* core: Fixed issue with remounting mounts that have a non-trailing space in the 'to' or 'from' paths. [[GH-19585](https://github.com/hashicorp/vault/pull/19585)] -* openapi: Fix logic for labeling unauthenticated/sudo paths. [[GH-19600](https://github.com/hashicorp/vault/pull/19600)] -* secrets/transform (enterprise): Fix persistence problem with rotated tokenization key versions -* ui: fixes issue navigating back a level using the breadcrumb from secret metadata view [[GH-19703](https://github.com/hashicorp/vault/pull/19703)] -* ui: pass encodeBase64 param to HMAC transit-key-actions. [[GH-19429](https://github.com/hashicorp/vault/pull/19429)] -* ui: use URLSearchParams interface to capture namespace param from SSOs (ex. ADFS) with decoded state param in callback url [[GH-19460](https://github.com/hashicorp/vault/pull/19460)] - -## 1.11.8 -### March 01, 2023 - -SECURITY: - -* auth/approle: When using the Vault and Vault Enterprise (Vault) approle auth method, any authenticated user with access to the /auth/approle/role/:role_name/secret-id-accessor/destroy endpoint can destroy the secret ID of any other role by providing the secret ID accessor. This vulnerability, CVE-2023-24999 has been fixed in Vault 1.13.0, 1.12.4, 1.11.8, 1.10.11 and above. [[HSEC-2023-07](https://discuss.hashicorp.com/t/hcsec-2023-07-vault-fails-to-verify-if-approle-secretid-belongs-to-role-during-a-destroy-operation/51305)] - -CHANGES: - -* core: Bump Go version to 1.19.6. - -IMPROVEMENTS: - -* secrets/database: Adds error message requiring password on root crednetial rotation. 
[[GH-19103](https://github.com/hashicorp/vault/pull/19103)] - -BUG FIXES: - -* auth/approle: Add nil check for the secret ID entry when deleting via secret id accessor preventing cross role secret id deletion [[GH-19186](https://github.com/hashicorp/vault/pull/19186)] -* core (enterprise): Fix panic when using invalid accessor for control-group request -* core (enterprise): Fix perf standby WAL streaming silently failures when replication setup happens at a bad time. -* core: Prevent panics in `sys/leases/lookup`, `sys/leases/revoke`, and `sys/leases/renew` endpoints if provided `lease_id` is null [[GH-18951](https://github.com/hashicorp/vault/pull/18951)] -* license (enterprise): Fix bug where license would update even if the license didn't change. -* replication (enterprise): Fix bug where reloading external plugin on a secondary would -break replication. -* secrets/ad: Fix bug where config couldn't be updated unless binddn/bindpass were included in the update. [[GH-18208](https://github.com/hashicorp/vault/pull/18208)] -* ui (enterprise): Fix cancel button from transform engine role creation page [[GH-19135](https://github.com/hashicorp/vault/pull/19135)] -* ui: Fix bug where logging in via OIDC fails if browser is in fullscreen mode [[GH-19071](https://github.com/hashicorp/vault/pull/19071)] -* ui: show Get credentials button for static roles detail page when a user has the proper permissions. [[GH-19190](https://github.com/hashicorp/vault/pull/19190)] - ## 1.11.7 ### February 6, 2023 @@ -1256,10 +673,6 @@ BUG FIXES: ## 1.11.4 ### September 30, 2022 -SECURITY: - -* secrets/pki: Vault’s TLS certificate auth method did not initially load the optionally-configured CRL issued by the role’s CA into memory on startup, resulting in the revocation list not being checked, if the CRL has not yet been retrieved. This vulnerability, CVE-2022-41316, is fixed in Vault 1.12.0, 1.11.4, 1.10.7, and 1.9.10. [[HSEC-2022-24](https://discuss.hashicorp.com/t/hcsec-2022-24-vaults-tls-cert-auth-method-only-loaded-crl-after-first-request/45483)] - IMPROVEMENTS: * agent/auto-auth: Add `exit_on_err` which when set to true, will cause Agent to exit if any errors are encountered during authentication. [[GH-17091](https://github.com/hashicorp/vault/pull/17091)] @@ -1282,10 +695,6 @@ BUG FIXES: ## 1.11.3 ### August 31, 2022 -SECURITY: - -* core: When entity aliases mapped to a single entity share the same alias name, but have different mount accessors, Vault can leak metadata between the aliases. This metadata leak may result in unexpected access if templated policies are using alias metadata for path names. This vulnerability, CVE-2022-40186, is fixed in 1.11.3, 1.10.6, and 1.9.9. [[HSEC-2022-18](https://discuss.hashicorp.com/t/hcsec-2022-18-vault-entity-alias-metadata-may-leak-between-aliases-with-the-same-name-assigned-to-the-same-entity/44550)] - CHANGES: * core: Bump Go version to 1.17.13. @@ -1343,10 +752,6 @@ BUG FIXES: ## 1.11.1 ### July 21, 2022 -SECURITY: - -* storage/raft: Vault Enterprise (“Vault”) clusters using Integrated Storage expose an unauthenticated API endpoint that could be abused to override the voter status of a node within a Vault HA cluster, introducing potential for future data loss or catastrophic failure. This vulnerability, CVE-2022-36129, was fixed in Vault 1.9.8, 1.10.5, and 1.11.1. 
[[HSEC-2022-15](https://discuss.hashicorp.com/t/hcsec-2022-15-vault-enterprise-does-not-verify-existing-voter-status-when-joining-an-integrated-storage-ha-node/42420)] - CHANGES: * core: Bump Go version to 1.17.12. @@ -1598,32 +1003,6 @@ rebuilt upon changes to the list of issuers. [[GH-15179](https://github.com/hash * ui: fix search-select component showing blank selections when editing group member entity [[GH-15058](https://github.com/hashicorp/vault/pull/15058)] * ui: masked values no longer give away length or location of special characters [[GH-15025](https://github.com/hashicorp/vault/pull/15025)] -## 1.10.11 -### March 01, 2023 - -SECURITY: - -* auth/approle: When using the Vault and Vault Enterprise (Vault) approle auth method, any authenticated user with access to the /auth/approle/role/:role_name/secret-id-accessor/destroy endpoint can destroy the secret ID of any other role by providing the secret ID accessor. This vulnerability, CVE-2023-24999 has been fixed in Vault 1.13.0, 1.12.4, 1.11.8, 1.10.11 and above. [[HSEC-2023-07](https://discuss.hashicorp.com/t/hcsec-2023-07-vault-fails-to-verify-if-approle-secretid-belongs-to-role-during-a-destroy-operation/51305)] - -CHANGES: - -* core: Bump Go version to 1.19.6. - -IMPROVEMENTS: - -* secrets/database: Adds error message requiring password on root crednetial rotation. [[GH-19103](https://github.com/hashicorp/vault/pull/19103)] - -BUG FIXES: - -* auth/approle: Add nil check for the secret ID entry when deleting via secret id accessor preventing cross role secret id deletion [[GH-19186](https://github.com/hashicorp/vault/pull/19186)] -* core (enterprise): Fix panic when using invalid accessor for control-group request -* core: Prevent panics in `sys/leases/lookup`, `sys/leases/revoke`, and `sys/leases/renew` endpoints if provided `lease_id` is null [[GH-18951](https://github.com/hashicorp/vault/pull/18951)] -* replication (enterprise): Fix bug where reloading external plugin on a secondary would -break replication. -* secrets/ad: Fix bug where config couldn't be updated unless binddn/bindpass were included in the update. [[GH-18209](https://github.com/hashicorp/vault/pull/18209)] -* ui (enterprise): Fix cancel button from transform engine role creation page [[GH-19135](https://github.com/hashicorp/vault/pull/19135)] -* ui: Fix bug where logging in via OIDC fails if browser is in fullscreen mode [[GH-19071](https://github.com/hashicorp/vault/pull/19071)] - ## 1.10.10 ### February 6, 2023 @@ -1689,10 +1068,6 @@ BUG FIXES: ## 1.10.7 ### September 30, 2022 -SECURITY: - -* secrets/pki: Vault’s TLS certificate auth method did not initially load the optionally-configured CRL issued by the role’s CA into memory on startup, resulting in the revocation list not being checked, if the CRL has not yet been retrieved. This vulnerability, CVE-2022-41316, is fixed in Vault 1.12.0, 1.11.4, 1.10.7, and 1.9.10. [[HSEC-2022-24](https://discuss.hashicorp.com/t/hcsec-2022-24-vaults-tls-cert-auth-method-only-loaded-crl-after-first-request/45483)] - BUG FIXES: * auth/cert: Vault does not initially load the CRLs in cert auth unless the read/write CRL endpoint is hit. [[GH-17138](https://github.com/hashicorp/vault/pull/17138)] @@ -1707,10 +1082,6 @@ BUG FIXES: ## 1.10.6 ### August 31, 2022 -SECURITY: - -* core: When entity aliases mapped to a single entity share the same alias name, but have different mount accessors, Vault can leak metadata between the aliases. 
This metadata leak may result in unexpected access if templated policies are using alias metadata for path names. This vulnerability, CVE-2022-40186, is fixed in 1.11.3, 1.10.6, and 1.9.9. [[HSEC-2022-18](https://discuss.hashicorp.com/t/hcsec-2022-18-vault-entity-alias-metadata-may-leak-between-aliases-with-the-same-name-assigned-to-the-same-entity/44550)] - CHANGES: * core: Bump Go version to 1.17.13. @@ -1744,10 +1115,6 @@ SECURITY: ## 1.10.5 ### July 21, 2022 -SECURITY: - -* storage/raft: Vault Enterprise (“Vault”) clusters using Integrated Storage expose an unauthenticated API endpoint that could be abused to override the voter status of a node within a Vault HA cluster, introducing potential for future data loss or catastrophic failure. This vulnerability, CVE-2022-36129, was fixed in Vault 1.9.8, 1.10.5, and 1.11.1. [[HSEC-2022-15](https://discuss.hashicorp.com/t/hcsec-2022-15-vault-enterprise-does-not-verify-existing-voter-status-when-joining-an-integrated-storage-ha-node/42420)] - CHANGES: * core/fips: Disable and warn about entropy augmentation in FIPS 140-2 Inside mode [[GH-15858](https://github.com/hashicorp/vault/pull/15858)] @@ -2105,10 +1472,6 @@ operation for upgraded configurations with a `root_password_ttl` of zero. [[GH-1 ## 1.9.10 ### September 30, 2022 -SECURITY: - -* secrets/pki: Vault’s TLS certificate auth method did not initially load the optionally-configured CRL issued by the role’s CA into memory on startup, resulting in the revocation list not being checked, if the CRL has not yet been retrieved. This vulnerability, CVE-2022-41316, is fixed in Vault 1.12.0, 1.11.4, 1.10.7, and 1.9.10. [[HSEC-2022-24](https://discuss.hashicorp.com/t/hcsec-2022-24-vaults-tls-cert-auth-method-only-loaded-crl-after-first-request/45483)] - BUG FIXES: * auth/cert: Vault does not initially load the CRLs in cert auth unless the read/write CRL endpoint is hit. [[GH-17138](https://github.com/hashicorp/vault/pull/17138)] @@ -2118,10 +1481,6 @@ BUG FIXES: ## 1.9.9 ### August 31, 2022 -SECURITY: - -* core: When entity aliases mapped to a single entity share the same alias name, but have different mount accessors, Vault can leak metadata between the aliases. This metadata leak may result in unexpected access if templated policies are using alias metadata for path names. This vulnerability, CVE-2022-40186, is fixed in 1.11.3, 1.10.6, and 1.9.9. [[HSEC-2022-18](https://discuss.hashicorp.com/t/hcsec-2022-18-vault-entity-alias-metadata-may-leak-between-aliases-with-the-same-name-assigned-to-the-same-entity/44550)] - CHANGES: * core: Bump Go version to 1.17.13. @@ -2142,10 +1501,6 @@ SECURITY: ## 1.9.8 ### July 21, 2022 -SECURITY: - -* storage/raft: Vault Enterprise (“Vault”) clusters using Integrated Storage expose an unauthenticated API endpoint that could be abused to override the voter status of a node within a Vault HA cluster, introducing potential for future data loss or catastrophic failure. This vulnerability, CVE-2022-36129, was fixed in Vault 1.9.8, 1.10.5, and 1.11.1. [[HSEC-2022-15](https://discuss.hashicorp.com/t/hcsec-2022-15-vault-enterprise-does-not-verify-existing-voter-status-when-joining-an-integrated-storage-ha-node/42420)] - CHANGES: * core: Bump Go version to 1.17.12. 
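Several of the bug-fix entries above reference the `sys/leases/lookup`, `sys/leases/revoke`, and `sys/leases/renew` endpoints, which take a `lease_id` in the request body. A minimal sketch of the request shape, not part of this patch; the address, token, and lease ID are placeholders:

```shell
# Look up a lease; per the entries above, a null lease_id previously
# caused a panic on these endpoints.
curl \
  --header "X-Vault-Token: $VAULT_TOKEN" \
  --request PUT \
  --data '{"lease_id": "auth/approle/login/abc123"}' \
  "$VAULT_ADDR/v1/sys/leases/lookup"
```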
diff --git a/CODEOWNERS b/CODEOWNERS
index e13a6b53982222..7c15d5a7a69e72 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -25,12 +25,12 @@
 /plugins/ @hashicorp/vault-ecosystem
 /vault/plugin_catalog.go @hashicorp/vault-ecosystem
 
-/website/content/ @hashicorp/vault-education-approvers
-/website/content/docs/plugin-portal.mdx @acahn @hashicorp/vault-education-approvers
+/website/content/ @yhyakuna
+/website/content/docs/plugin-portal.mdx @acahn @yhyakuna
 
 # Plugin docs
-/website/content/docs/plugins/ @fairclothjm @hashicorp/vault-education-approvers
-/website/content/docs/upgrading/plugins.mdx @fairclothjm @hashicorp/vault-education-approvers
+/website/content/docs/plugins/ @fairclothjm @yhyakuna
+/website/content/docs/upgrading/plugins.mdx @fairclothjm @yhyakuna
 
 # UI code related to Vault's JWT/OIDC auth method and OIDC provider.
 # Changes to these files often require coordination with backend code,
@@ -47,38 +47,20 @@
 /enos/ @hashicorp/quality-team
 
 # Cryptosec
-/builtin/logical/pki/ @hashicorp/vault-crypto
-/builtin/logical/pkiext/ @hashicorp/vault-crypto
-/website/content/docs/secrets/pki/ @hashicorp/vault-crypto
-/website/content/api-docs/secret/pki.mdx @hashicorp/vault-crypto
-/builtin/credential/cert/ @hashicorp/vault-crypto
-/website/content/docs/auth/cert.mdx @hashicorp/vault-crypto
-/website/content/api-docs/auth/cert.mdx @hashicorp/vault-crypto
-/builtin/logical/ssh/ @hashicorp/vault-crypto
-/website/content/docs/secrets/ssh/ @hashicorp/vault-crypto
-/website/content/api-docs/secret/ssh.mdx @hashicorp/vault-crypto
-/builtin/logical/transit/ @hashicorp/vault-crypto
-/website/content/docs/secrets/transit/ @hashicorp/vault-crypto
-/website/content/api-docs/secret/transit.mdx @hashicorp/vault-crypto
-/helper/random/ @hashicorp/vault-crypto
-/sdk/helper/certutil/ @hashicorp/vault-crypto
-/sdk/helper/cryptoutil/ @hashicorp/vault-crypto
-/sdk/helper/kdf/ @hashicorp/vault-crypto
-/sdk/helper/keysutil/ @hashicorp/vault-crypto
-/sdk/helper/ocsp/ @hashicorp/vault-crypto
-/sdk/helper/salt/ @hashicorp/vault-crypto
-/sdk/helper/tlsutil/ @hashicorp/vault-crypto
-/shamir/ @hashicorp/vault-crypto
-/vault/barrier* @hashicorp/vault-crypto
-/vault/managed_key* @hashicorp/vault-crypto
-/vault/seal* @hashicorp/vault-crypto
-/vault/seal/ @hashicorp/vault-crypto
-/website/content/docs/configuration/seal/ @hashicorp/vault-crypto
-/website/content/docs/enterprise/sealwrap.mdx @hashicorp/vault-crypto
-/website/content/api-docs/system/sealwrap-rewrap.mdx @hashicorp/vault-crypto
-/website/content/docs/secrets/transform/ @hashicorp/vault-crypto
-/website/content/api-docs/secret/transform.mdx @hashicorp/vault-crypto
-/website/content/docs/secrets/kmip-profiles.mdx @hashicorp/vault-crypto
-/website/content/docs/secrets/kmip.mdx @hashicorp/vault-crypto
-/website/content/api-docs/secret/kmip.mdx @hashicorp/vault-crypto
-/website/content/docs/enterprise/fips/ @hashicorp/vault-crypto
+/builtin/logical/pki/ @hashicorp/vault-crypto
+/builtin/credential/cert/ @hashicorp/vault-crypto
+/builtin/logical/ssh/ @hashicorp/vault-crypto
+/builtin/logical/transit/ @hashicorp/vault-crypto
+/helper/random/ @hashicorp/vault-crypto
+/sdk/helper/certutil/ @hashicorp/vault-crypto
+/sdk/helper/cryptoutil/ @hashicorp/vault-crypto
+/sdk/helper/kdf/ @hashicorp/vault-crypto
+/sdk/helper/keysutil/ @hashicorp/vault-crypto
+/sdk/helper/ocsp/ @hashicorp/vault-crypto
+/sdk/helper/salt/ @hashicorp/vault-crypto
+/sdk/helper/tlsutil/ @hashicorp/vault-crypto
+/shamir/ @hashicorp/vault-crypto
+/vault/barrier* @hashicorp/vault-crypto
+/vault/managed_key* @hashicorp/vault-crypto
+/vault/seal* @hashicorp/vault-crypto
+/vault/seal/ @hashicorp/vault-crypto
diff --git a/Dockerfile b/Dockerfile
index d088623b834baf..5d4ce5f581999a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,3 @@
-# Copyright (c) HashiCorp, Inc.
-# SPDX-License-Identifier: MPL-2.0
-
 ## DOCKERHUB DOCKERFILE ##
 FROM alpine:3.15 as default
 
diff --git a/Makefile b/Makefile
index 82fed7a838830f..5128a878dc2a54 100644
--- a/Makefile
+++ b/Makefile
@@ -111,20 +111,6 @@ vet:
 	echo "and fix them if necessary before submitting the code for reviewal."; \
 	fi
 
-# deprecations runs staticcheck tool to look for deprecations. Checks entire code to see if it
-# has deprecated function, variable, constant or field
-deprecations:
-	make bootstrap
-	repositoryName=$(basename `git rev-parse --show-toplevel`)
-	./scripts/deprecations-checker.sh "" repositoryName
-
-# ci-deprecations runs staticcheck tool to look for deprecations. All output gets piped to revgrep
-# which will only return an error if changes that is not on main has deprecated function, variable, constant or field
-ci-deprecations:
-	make bootstrap
-	repositoryName=$(basename `git rev-parse --show-toplevel`)
-	./scripts/deprecations-checker.sh main repositoryName
-
 # tools/godoctests/.bin/godoctests builds the custom analyzer to check for godocs for tests
 tools/godoctests/.bin/godoctests:
 	@cd tools/godoctests && $(GO_CMD) build -o .bin/godoctests .
@@ -139,21 +125,6 @@ vet-godoctests: bootstrap tools/godoctests/.bin/godoctests
 ci-vet-godoctests: ci-bootstrap tools/godoctests/.bin/godoctests
 	@$(GO_CMD) vet -vettool=./tools/godoctests/.bin/godoctests $(TEST) 2>&1 | revgrep origin/main
 
-# tools/gonilnilfunctions/.bin/gonilnilfunctions builds the custom analyzer to check for nil, nil function returns
-tools/gonilnilfunctions/.bin/gonilnilfunctions:
-	@cd tools/gonilnilfunctions && $(GO_CMD) build -o .bin/gonilnilfunctions .
-
-# vet-gonilnilfunctions runs gonilnilfunctions on functions. All output gets piped to revgrep
-# which will only return an error if a new function returns nil, nil (where one of the nils could be an error)
-vet-gonilnilfunctions: bootstrap tools/gonilnilfunctions/.bin/gonilnilfunctions
-	@$(GO_CMD) vet -vettool=./tools/gonilnilfunctions/.bin/gonilnilfunctions ./... 2>&1 | revgrep
-
-# ci-vet-gonilnilfunctions runs gonilnilfunctions on functions. All output gets piped to revgrep
-# which will only return an error if a new function that is not on main has an issue
-ci-vet-gonilnilfunctions: ci-bootstrap tools/gonilnilfunctions/.bin/gonilnilfunctions
-	@$(GO_CMD) vet -vettool=./tools/gonilnilfunctions/.bin/gonilnilfunctions ./... 2>&1 | revgrep origin/main
-
-
 # lint runs vet plus a number of other checkers, it is more comprehensive, but louder
 lint:
 	@$(GO_CMD) list -f '{{.Dir}}' ./... | grep -v /vendor/ \
@@ -194,7 +165,7 @@ static-assets-dir:
 
 install-ui-dependencies:
 	@echo "--> Installing JavaScript assets"
-	@cd ui && yarn
+	@cd ui && yarn --ignore-optional
 
 test-ember: install-ui-dependencies
 	@echo "--> Running ember tests"
@@ -244,8 +215,8 @@ proto: bootstrap
 	# No additional sed expressions should be added to this list. Going forward
 	# we should just use the variable names choosen by protobuf. These are left
-	$(SED) -i -e 's/Id/ID/' -e 's/SPDX-License-IDentifier/SPDX-License-Identifier/' vault/request_forwarding_service.pb.go
-	$(SED) -i -e 's/Idp/IDP/' -e 's/Url/URL/' -e 's/Id/ID/' -e 's/IDentity/Identity/' -e 's/EntityId/EntityID/' -e 's/Api/API/' -e 's/Qr/QR/' -e 's/Totp/TOTP/' -e 's/Mfa/MFA/' -e 's/Pingid/PingID/' -e 's/namespaceId/namespaceID/' -e 's/Ttl/TTL/' -e 's/BoundCidrs/BoundCIDRs/' -e 's/SPDX-License-IDentifier/SPDX-License-Identifier/' helper/identity/types.pb.go helper/identity/mfa/types.pb.go helper/storagepacker/types.pb.go sdk/plugin/pb/backend.pb.go sdk/logical/identity.pb.go vault/activity/activity_log.pb.go
+	$(SED) -i -e 's/Id/ID/' vault/request_forwarding_service.pb.go
+	$(SED) -i -e 's/Idp/IDP/' -e 's/Url/URL/' -e 's/Id/ID/' -e 's/IDentity/Identity/' -e 's/EntityId/EntityID/' -e 's/Api/API/' -e 's/Qr/QR/' -e 's/Totp/TOTP/' -e 's/Mfa/MFA/' -e 's/Pingid/PingID/' -e 's/namespaceId/namespaceID/' -e 's/Ttl/TTL/' -e 's/BoundCidrs/BoundCIDRs/' helper/identity/types.pb.go helper/identity/mfa/types.pb.go helper/storagepacker/types.pb.go sdk/plugin/pb/backend.pb.go sdk/logical/identity.pb.go vault/activity/activity_log.pb.go
 	# This will inject the sentinel struct tags as decorated in the proto files.
 	protoc-go-inject-tag -input=./helper/identity/types.pb.go
@@ -296,7 +267,7 @@ hana-database-plugin:
 
 mongodb-database-plugin:
 	@CGO_ENABLED=0 $(GO_CMD) build -o bin/mongodb-database-plugin ./plugins/database/mongodb/mongodb-database-plugin
 
-.PHONY: bin default prep test vet bootstrap ci-bootstrap fmt fmtcheck mysql-database-plugin mysql-legacy-database-plugin cassandra-database-plugin influxdb-database-plugin postgresql-database-plugin mssql-database-plugin hana-database-plugin mongodb-database-plugin ember-dist ember-dist-dev static-dist static-dist-dev assetcheck check-vault-in-path packages build build-ci semgrep semgrep-ci vet-godoctests ci-vet-godoctests vet-gonilnilfunctions ci-vet-gonilnilfunctions
+.PHONY: bin default prep test vet bootstrap ci-bootstrap fmt fmtcheck mysql-database-plugin mysql-legacy-database-plugin cassandra-database-plugin influxdb-database-plugin postgresql-database-plugin mssql-database-plugin hana-database-plugin mongodb-database-plugin ember-dist ember-dist-dev static-dist static-dist-dev assetcheck check-vault-in-path packages build build-ci semgrep semgrep-ci vet-godoctests ci-vet-godoctests
 
 .NOTPARALLEL: ember-dist ember-dist-dev
diff --git a/api/api_test.go b/api/api_test.go
index 8bf69e0de97a63..e4ba3153203eb2 100644
--- a/api/api_test.go
+++ b/api/api_test.go
@@ -1,6 +1,3 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: MPL-2.0
-
 package api
 
 import (
diff --git a/api/auth.go b/api/auth.go
index c1ef7a77989d59..ab38acfbaec21d 100644
--- a/api/auth.go
+++ b/api/auth.go
@@ -1,6 +1,3 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: MPL-2.0
-
 package api
 
 import (
diff --git a/api/auth/approle/approle.go b/api/auth/approle/approle.go
index 10d26b610f42e2..b8cf012284418f 100644
--- a/api/auth/approle/approle.go
+++ b/api/auth/approle/approle.go
@@ -1,6 +1,3 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: MPL-2.0
-
 package approle
 
 import (
diff --git a/api/auth/approle/approle_test.go b/api/auth/approle/approle_test.go
index cdfb4e285e7923..f2628c695cc278 100644
--- a/api/auth/approle/approle_test.go
+++ b/api/auth/approle/approle_test.go
@@ -1,6 +1,3 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: MPL-2.0 - package approle import ( diff --git a/api/auth/approle/go.mod b/api/auth/approle/go.mod index a1d1f478514776..f321791dbdd542 100644 --- a/api/auth/approle/go.mod +++ b/api/auth/approle/go.mod @@ -2,4 +2,4 @@ module github.com/hashicorp/vault/api/auth/approle go 1.16 -require github.com/hashicorp/vault/api v1.9.2 +require github.com/hashicorp/vault/api v1.9.0 diff --git a/api/auth/approle/go.sum b/api/auth/approle/go.sum index dfd66b2ccb2478..1ac812646c1889 100644 --- a/api/auth/approle/go.sum +++ b/api/auth/approle/go.sum @@ -7,11 +7,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -39,8 +36,8 @@ github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0S github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= +github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= @@ -65,29 +62,24 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= 
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -95,16 +87,16 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -113,5 +105,7 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api/auth/aws/aws.go b/api/auth/aws/aws.go index f2aa9be1d00c45..44a4f6db1f5843 100644 --- a/api/auth/aws/aws.go +++ b/api/auth/aws/aws.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( diff --git a/api/auth/aws/go.mod b/api/auth/aws/go.mod index 86261e766a14bf..19e8f0175b7c20 100644 --- a/api/auth/aws/go.mod +++ b/api/auth/aws/go.mod @@ -7,5 +7,5 @@ require ( github.com/hashicorp/go-hclog v0.16.2 github.com/hashicorp/go-secure-stdlib/awsutil v0.1.6 github.com/hashicorp/go-uuid v1.0.2 - github.com/hashicorp/vault/api v1.9.2 + github.com/hashicorp/vault/api v1.9.0 ) diff --git a/api/auth/aws/go.sum b/api/auth/aws/go.sum index 4d30b11ac27878..2a723f1eae18a6 100644 --- a/api/auth/aws/go.sum +++ b/api/auth/aws/go.sum @@ -10,12 +10,9 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -47,8 +44,8 @@ github.com/hashicorp/go-uuid v1.0.2 
h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2I github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= +github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/jmespath/go-jmespath v0.3.0 h1:OS12ieG61fsCg5+qLJ+SsW9NicxNkg3b25OyT2yCeUc= github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -84,30 +81,25 @@ github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIH github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -116,16 +108,16 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -136,6 +128,8 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= 
diff --git a/api/auth/azure/azure.go b/api/auth/azure/azure.go index b682195701150a..a09d15a1472145 100644 --- a/api/auth/azure/azure.go +++ b/api/auth/azure/azure.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package azure import ( diff --git a/api/auth/azure/go.mod b/api/auth/azure/go.mod index 3a192f3e06263e..4e5c95f5a7b4fc 100644 --- a/api/auth/azure/go.mod +++ b/api/auth/azure/go.mod @@ -2,4 +2,4 @@ module github.com/hashicorp/vault/api/auth/azure go 1.16 -require github.com/hashicorp/vault/api v1.9.2 +require github.com/hashicorp/vault/api v1.9.0 diff --git a/api/auth/azure/go.sum b/api/auth/azure/go.sum index dfd66b2ccb2478..1ac812646c1889 100644 --- a/api/auth/azure/go.sum +++ b/api/auth/azure/go.sum @@ -7,11 +7,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -39,8 +36,8 @@ github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0S github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= +github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= @@ -65,29 +62,24 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -95,16 +87,16 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= 
+golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -113,5 +105,7 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api/auth/gcp/gcp.go b/api/auth/gcp/gcp.go index 2d6ef842a4b43b..a5dd9364612811 100644 --- a/api/auth/gcp/gcp.go +++ b/api/auth/gcp/gcp.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package gcp import ( diff --git a/api/auth/gcp/go.mod b/api/auth/gcp/go.mod index 7509753daf55ee..f2611e35de0506 100644 --- a/api/auth/gcp/go.mod +++ b/api/auth/gcp/go.mod @@ -4,7 +4,7 @@ go 1.16 require ( cloud.google.com/go v0.97.0 - github.com/hashicorp/vault/api v1.9.2 + github.com/hashicorp/vault/api v1.9.0 google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0 google.golang.org/grpc v1.41.0 // indirect ) diff --git a/api/auth/gcp/go.sum b/api/auth/gcp/go.sum index 35da60ad89ecb3..8584be4a8d2159 100644 --- a/api/auth/gcp/go.sum +++ b/api/auth/gcp/go.sum @@ -82,8 +82,6 @@ github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeME github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -187,8 +185,8 @@ github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= +github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= @@ -248,12 +246,11 @@ go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqe golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 
h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -326,9 +323,8 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -409,11 +405,11 @@ golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -423,8 +419,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 
h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -625,6 +621,8 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= diff --git a/api/auth/kubernetes/go.mod b/api/auth/kubernetes/go.mod index 107628276e5d83..f7e418df1b0338 100644 --- a/api/auth/kubernetes/go.mod +++ b/api/auth/kubernetes/go.mod @@ -2,4 +2,4 @@ module github.com/hashicorp/vault/api/auth/kubernetes go 1.16 -require github.com/hashicorp/vault/api v1.9.2 +require github.com/hashicorp/vault/api v1.9.0 diff --git a/api/auth/kubernetes/go.sum b/api/auth/kubernetes/go.sum index dfd66b2ccb2478..1ac812646c1889 100644 --- a/api/auth/kubernetes/go.sum +++ b/api/auth/kubernetes/go.sum @@ -7,11 +7,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -39,8 +36,8 @@ github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0S github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= 
+github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= @@ -65,29 +62,24 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -95,16 +87,16 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -113,5 +105,7 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api/auth/kubernetes/kubernetes.go b/api/auth/kubernetes/kubernetes.go index f0e38c17a2b474..c2fef86a5fd0ec 100644 --- a/api/auth/kubernetes/kubernetes.go +++ b/api/auth/kubernetes/kubernetes.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( diff --git a/api/auth/ldap/go.mod b/api/auth/ldap/go.mod index 4bfa972ebf0fb0..f7413c4d4ce17c 100644 --- a/api/auth/ldap/go.mod +++ b/api/auth/ldap/go.mod @@ -2,4 +2,4 @@ module github.com/hashicorp/vault/api/auth/ldap go 1.16 -require github.com/hashicorp/vault/api v1.9.2 +require github.com/hashicorp/vault/api v1.9.0 diff --git a/api/auth/ldap/go.sum b/api/auth/ldap/go.sum index dfd66b2ccb2478..1ac812646c1889 100644 --- a/api/auth/ldap/go.sum +++ b/api/auth/ldap/go.sum @@ -7,11 +7,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -39,8 +36,8 @@ github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0S github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= +github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= @@ -65,29 +62,24 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto 
v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -95,16 +87,16 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -113,5 +105,7 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api/auth/ldap/ldap.go b/api/auth/ldap/ldap.go index fdf1a38dd0c11c..9f37abc664f7ff 100644 --- a/api/auth/ldap/ldap.go +++ b/api/auth/ldap/ldap.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( diff --git a/api/auth/ldap/ldap_test.go b/api/auth/ldap/ldap_test.go index abdccb03583567..8633c4dfac1160 100644 --- a/api/auth/ldap/ldap_test.go +++ b/api/auth/ldap/ldap_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ldap import ( diff --git a/api/auth/userpass/go.mod b/api/auth/userpass/go.mod index 194bc6c952b714..fa4712b0547398 100644 --- a/api/auth/userpass/go.mod +++ b/api/auth/userpass/go.mod @@ -2,4 +2,4 @@ module github.com/hashicorp/vault/api/auth/userpass go 1.16 -require github.com/hashicorp/vault/api v1.9.2 +require github.com/hashicorp/vault/api v1.9.0 diff --git a/api/auth/userpass/go.sum b/api/auth/userpass/go.sum index dfd66b2ccb2478..1ac812646c1889 100644 --- a/api/auth/userpass/go.sum +++ b/api/auth/userpass/go.sum @@ -7,11 +7,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -39,8 +36,8 @@ github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0S github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/vault/api v1.9.2 h1:YjkZLJ7K3inKgMZ0wzCU9OHqc+UqMQyXsPXnf3Cl2as= -github.com/hashicorp/vault/api v1.9.2/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5W0Vp1IrFI8N8= +github.com/hashicorp/vault/api v1.9.0 h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8= +github.com/hashicorp/vault/api v1.9.0/go.mod h1:lloELQP4EyhjnCQhF8agKvWIVTmxbpEJj70b98959sM= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= @@ -65,29 +62,24 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= 
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -95,16 +87,16 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -113,5 +105,7 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api/auth/userpass/userpass.go b/api/auth/userpass/userpass.go index 3e8942953d3a81..124cd7a68f8e97 100644 --- a/api/auth/userpass/userpass.go +++ b/api/auth/userpass/userpass.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( diff --git a/api/auth/userpass/userpass_test.go b/api/auth/userpass/userpass_test.go index 4fe68d8d4ef12f..0728117a1e8c42 100644 --- a/api/auth/userpass/userpass_test.go +++ b/api/auth/userpass/userpass_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( diff --git a/api/auth_test.go b/api/auth_test.go index ca69630cce5e9c..4dc71a09844d96 100644 --- a/api/auth_test.go +++ b/api/auth_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/auth_token.go b/api/auth_token.go index 1980be06ef5b9b..52be1e7852b946 100644 --- a/api/auth_token.go +++ b/api/auth_token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/client.go b/api/client.go index d20477e1d9d7b9..79276abcc5f587 100644 --- a/api/client.go +++ b/api/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( @@ -203,7 +200,6 @@ type Config struct { // commands such as 'vault operator raft snapshot' as this redirects to the // primary node. 
DisableRedirects bool - clientTLSConfig *tls.Config } // TLSConfig contains the parameters needed to configure TLS on the HTTP client @@ -338,17 +334,10 @@ func (c *Config) configureTLS(t *TLSConfig) error { if t.TLSServerName != "" { clientTLSConfig.ServerName = t.TLSServerName } - c.clientTLSConfig = clientTLSConfig return nil } -func (c *Config) TLSConfig() *tls.Config { - c.modifyLock.RLock() - defer c.modifyLock.RUnlock() - return c.clientTLSConfig.Clone() -} - // ConfigureTLS takes a set of TLS configurations and applies those to the // HTTP client. func (c *Config) ConfigureTLS(t *TLSConfig) error { @@ -673,7 +662,6 @@ func (c *Client) CloneConfig() *Config { newConfig.CloneHeaders = c.config.CloneHeaders newConfig.CloneToken = c.config.CloneToken newConfig.ReadYourWrites = c.config.ReadYourWrites - newConfig.clientTLSConfig = c.config.clientTLSConfig // we specifically want a _copy_ of the client here, not a pointer to the original one newClient := *c.config.HttpClient @@ -1376,7 +1364,6 @@ START: LastOutputPolicyError = &OutputPolicyError{ method: req.Method, path: strings.TrimPrefix(req.URL.Path, "/v1"), - params: req.URL.Query(), } return nil, LastOutputPolicyError } diff --git a/api/client_test.go b/api/client_test.go index a23c0c19e7d58d..feb3ca6e879e09 100644 --- a/api/client_test.go +++ b/api/client_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/go.mod b/api/go.mod index 20fb4617af23b2..62288bd371f033 100644 --- a/api/go.mod +++ b/api/go.mod @@ -1,15 +1,11 @@ module github.com/hashicorp/vault/api -// The Go version directive for the api package should normally only be updated when -// code in the api package requires a newer Go version to build. It should not -// automatically track the Go version used to build Vault itself. Many projects import -// the api module and we don't want to impose a newer version on them any more than we -// have to. 
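
The client.go hunk above drops the cached clientTLSConfig field and the Config.TLSConfig() accessor. A minimal sketch (not part of this patch; the CA path and server name are hypothetical values) of how a caller can still set up TLS through the retained ConfigureTLS helper:

package main

import (
	"log"

	"github.com/hashicorp/vault/api"
)

// newTLSClient builds the TLS settings up front and hands them to
// ConfigureTLS; after this change there is no Config.TLSConfig() accessor
// to read the resulting *tls.Config back, so keep your own copy if needed.
func newTLSClient() (*api.Client, error) {
	cfg := api.DefaultConfig()
	err := cfg.ConfigureTLS(&api.TLSConfig{
		CACert:        "/etc/vault/ca.pem",    // hypothetical CA bundle path
		TLSServerName: "vault.service.consul", // hypothetical SNI name
	})
	if err != nil {
		return nil, err
	}
	return api.NewClient(cfg)
}

func main() {
	if _, err := newTLSClient(); err != nil {
		log.Fatal(err)
	}
}
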
go 1.19 +replace github.com/hashicorp/vault/sdk => ../sdk + require ( github.com/cenkalti/backoff/v3 v3.0.0 - github.com/go-jose/go-jose/v3 v3.0.0 github.com/go-test/deep v1.0.2 github.com/hashicorp/errwrap v1.1.0 github.com/hashicorp/go-cleanhttp v0.5.2 @@ -23,6 +19,7 @@ require ( github.com/mitchellh/mapstructure v1.5.0 golang.org/x/net v0.7.0 golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 + gopkg.in/square/go-jose.v2 v2.5.1 ) require ( diff --git a/api/go.sum b/api/go.sum index e8f5f1811f8f4e..808c56f7fe08a8 100644 --- a/api/go.sum +++ b/api/go.sum @@ -7,11 +7,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -63,31 +60,26 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api/help.go b/api/help.go index c119f6c3c9535f..0988ebcd1fc9db 100644 --- a/api/help.go +++ b/api/help.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/kv.go b/api/kv.go index 20862fbfdf1d9d..37699df266f9f4 100644 --- a/api/kv.go +++ b/api/kv.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import "errors" diff --git a/api/kv_test.go b/api/kv_test.go index 36d769feaa6b6c..f8b3d3917be491 100644 --- a/api/kv_test.go +++ b/api/kv_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/kv_v1.go b/api/kv_v1.go index a914e03576e869..22ba992384b79f 100644 --- a/api/kv_v1.go +++ b/api/kv_v1.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/kv_v2.go b/api/kv_v2.go index 72c29eaa42612e..335c21001be2fe 100644 --- a/api/kv_v2.go +++ b/api/kv_v2.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/lifetime_watcher.go b/api/lifetime_watcher.go index 5c060e5a150997..5f90de00a20d41 100644 --- a/api/lifetime_watcher.go +++ b/api/lifetime_watcher.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( @@ -150,13 +147,6 @@ func (c *Client) NewLifetimeWatcher(i *LifetimeWatcherInput) (*LifetimeWatcher, random := i.Rand if random == nil { - // NOTE: - // Rather than a cryptographically secure random number generator (RNG), - // the default behavior uses the math/rand package. The random number is - // used to introduce a slight jitter when calculating the grace period - // for a monitored secret monitoring. This is intended to stagger renewal - // requests to the Vault server, but in a semi-predictable way, so there - // is no need to use a cryptographically secure RNG. 
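
The lifetime_watcher.go hunk below folds the sleep calculation back inline, replacing the calculateSleepDuration helper. A minimal standalone sketch of that formula (not part of this patch), using the same two-thirds-of-lease plus one-third-of-grace split shown in the added lines:

package main

import (
	"fmt"
	"time"
)

// sleepFor mirrors the restored inline calculation: sleep for two thirds of
// the remaining lease duration plus one third of the (already jittered)
// grace period before the next renewal attempt.
func sleepFor(remainingLease, grace time.Duration) time.Duration {
	return time.Duration(float64(remainingLease.Nanoseconds())*2/3 +
		float64(grace.Nanoseconds())/3)
}

func main() {
	// With a 90s remaining lease and a 15s grace period the watcher
	// would sleep for roughly 65s (60s + 5s).
	fmt.Println(sleepFor(90*time.Second, 15*time.Second))
}
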
random = rand.New(rand.NewSource(int64(time.Now().Nanosecond()))) } @@ -347,14 +337,24 @@ func (r *LifetimeWatcher) doRenewWithOptions(tokenMode bool, nonRenewable bool, var sleepDuration time.Duration - if errorBackoff == nil { - sleepDuration = r.calculateSleepDuration(remainingLeaseDuration, priorDuration) - } else if errorBackoff.NextBackOff() == backoff.Stop { - return err - } + if errorBackoff != nil { + sleepDuration = errorBackoff.NextBackOff() + if sleepDuration == backoff.Stop { + return err + } + } else { + // We keep evaluating a new grace period so long as the lease is + // extending. Once it stops extending, we've hit the max and need to + // rely on the grace duration. + if remainingLeaseDuration > priorDuration { + r.calculateGrace(remainingLeaseDuration, time.Duration(r.increment)*time.Second) + } + priorDuration = remainingLeaseDuration - // remainingLeaseDuration becomes the priorDuration for the next loop - priorDuration = remainingLeaseDuration + // The sleep duration is set to 2/3 of the current lease duration plus + // 1/3 of the current grace period, which adds jitter. + sleepDuration = time.Duration(float64(remainingLeaseDuration.Nanoseconds())*2/3 + float64(r.grace.Nanoseconds())/3) + } // If we are within grace, return now; or, if the amount of time we // would sleep would land us in the grace period. This helps with short @@ -377,21 +377,6 @@ func (r *LifetimeWatcher) doRenewWithOptions(tokenMode bool, nonRenewable bool, } } -// calculateSleepDuration calculates the amount of time the LifeTimeWatcher should sleep -// before re-entering its loop. -func (r *LifetimeWatcher) calculateSleepDuration(remainingLeaseDuration, priorDuration time.Duration) time.Duration { - // We keep evaluating a new grace period so long as the lease is - // extending. Once it stops extending, we've hit the max and need to - // rely on the grace duration. - if remainingLeaseDuration > priorDuration { - r.calculateGrace(remainingLeaseDuration, time.Duration(r.increment)*time.Second) - } - - // The sleep duration is set to 2/3 of the current lease duration plus - // 1/3 of the current grace period, which adds jitter. - return time.Duration(float64(remainingLeaseDuration.Nanoseconds())*2/3 + float64(r.grace.Nanoseconds())/3) -} - // calculateGrace calculates the grace period based on the minimum of the // remaining lease duration and the token increment value; it also adds some // jitter to not have clients be in sync. diff --git a/api/logical.go b/api/logical.go index 927dd168e4404a..2c453897715528 100644 --- a/api/logical.go +++ b/api/logical.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/output_policy.go b/api/output_policy.go index c3ec522891b50f..85d1617e5e943d 100644 --- a/api/output_policy.go +++ b/api/output_policy.go @@ -1,13 +1,9 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package api import ( "fmt" "net/http" "net/url" - "strconv" "strings" ) @@ -20,7 +16,6 @@ var LastOutputPolicyError *OutputPolicyError type OutputPolicyError struct { method string path string - params url.Values finalHCLString string } @@ -49,22 +44,8 @@ func (d *OutputPolicyError) HCLString() (string, error) { // Builds a sample policy document from the request func (d *OutputPolicyError) buildSamplePolicy() (string, error) { - operation := d.method - // List is often defined as a URL param instead of as an http.Method - // this will check for the header and properly switch off of the intended functionality - if d.params.Has("list") { - isList, err := strconv.ParseBool(d.params.Get("list")) - if err != nil { - return "", fmt.Errorf("the value of the list url param is not a bool: %v", err) - } - - if isList { - operation = "LIST" - } - } - var capabilities []string - switch operation { + switch d.method { case http.MethodGet, "": capabilities = append(capabilities, "read") case http.MethodPost, http.MethodPut: @@ -78,15 +59,17 @@ func (d *OutputPolicyError) buildSamplePolicy() (string, error) { capabilities = append(capabilities, "list") } + // sanitize, then trim the Vault address and v1 from the front of the path + path, err := url.PathUnescape(d.path) + if err != nil { + return "", fmt.Errorf("failed to unescape request URL characters: %v", err) + } + // determine whether to add sudo capability - if IsSudoPath(d.path) { + if IsSudoPath(path) { capabilities = append(capabilities, "sudo") } - return formatOutputPolicy(d.path, capabilities), nil -} - -func formatOutputPolicy(path string, capabilities []string) string { // the OpenAPI response has a / in front of each path, // but policies need the path without that leading slash path = strings.TrimLeft(path, "/") @@ -95,5 +78,5 @@ func formatOutputPolicy(path string, capabilities []string) string { return fmt.Sprintf( `path "%s" { capabilities = ["%s"] -}`, path, capStr) +}`, path, capStr), nil } diff --git a/api/output_policy_test.go b/api/output_policy_test.go deleted file mode 100644 index 2092e2ba2a0165..00000000000000 --- a/api/output_policy_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package api - -import ( - "net/http" - "net/url" - "testing" -) - -func TestBuildSamplePolicy(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - req *OutputPolicyError - expected string - err error - }{ - { - "happy path", - &OutputPolicyError{ - method: http.MethodGet, - path: "/something", - }, - formatOutputPolicy("/something", []string{"read"}), - nil, - }, - { // test included to clear up some confusion around the sanitize comment - "demonstrate that this function does not format fully", - &OutputPolicyError{ - method: http.MethodGet, - path: "http://vault.test/v1/something", - }, - formatOutputPolicy("http://vault.test/v1/something", []string{"read"}), - nil, - }, - { // test that list is properly returned - "list over read returned", - &OutputPolicyError{ - method: http.MethodGet, - path: "/something", - params: url.Values{ - "list": []string{"true"}, - }, - }, - formatOutputPolicy("/something", []string{"list"}), - nil, - }, - { - "valid protected path", - &OutputPolicyError{ - method: http.MethodGet, - path: "/sys/config/ui/headers/", - }, - formatOutputPolicy("/sys/config/ui/headers/", []string{"read", "sudo"}), - nil, - }, - { // ensure that a formatted path that trims the trailing slash as the code does still works for recognizing a sudo path - "valid protected path no trailing /", - &OutputPolicyError{ - method: http.MethodGet, - path: "/sys/config/ui/headers", - }, - formatOutputPolicy("/sys/config/ui/headers", []string{"read", "sudo"}), - nil, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - result, err := tc.req.buildSamplePolicy() - if tc.err != err { - t.Fatalf("expected for the error to be %v instead got %v\n", tc.err, err) - } - - if tc.expected != result { - t.Fatalf("expected for the policy string to be %v instead got %v\n", tc.expected, result) - } - }) - } -} diff --git a/api/output_string.go b/api/output_string.go index d7777712d20950..80c591f20b5c75 100644 --- a/api/output_string.go +++ b/api/output_string.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/plugin_helpers.go b/api/plugin_helpers.go index 507b72c4c20de7..6602a044bd0a18 100644 --- a/api/plugin_helpers.go +++ b/api/plugin_helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( @@ -14,7 +11,7 @@ import ( "os" "regexp" - "github.com/go-jose/go-jose/v3/jwt" + squarejwt "gopkg.in/square/go-jose.v2/jwt" "github.com/hashicorp/errwrap" ) @@ -40,7 +37,7 @@ const ( // path matches that path or not (useful specifically for the paths that // contain templated fields.) 
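
The sudoPaths map declared just below is what api.IsSudoPath consults when buildSamplePolicy (hunk above) decides whether to add the sudo capability. A minimal sketch of that lookup, not part of this patch and trimmed to two representative entries from the real map:

package main

import (
	"fmt"
	"regexp"
)

// A trimmed-down stand-in for the real sudoPaths map: literal paths map to
// the regexps used for entries that contain templated fields.
var sudoPaths = map[string]*regexp.Regexp{
	"/sys/audit":                  regexp.MustCompile(`^/sys/audit$`),
	"/sys/leases/lookup/{prefix}": regexp.MustCompile(`^/sys/leases/lookup/.+$`),
}

// isSudoPath approximates api.IsSudoPath: an exact key match wins, otherwise
// each templated entry's regexp is tried against the request path.
func isSudoPath(path string) bool {
	if _, ok := sudoPaths[path]; ok {
		return true
	}
	for _, re := range sudoPaths {
		if re.MatchString(path) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isSudoPath("/sys/leases/lookup/aws/creds/deploy")) // true
	fmt.Println(isSudoPath("/sys/mounts"))                         // false
}
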
var sudoPaths = map[string]*regexp.Regexp{ - "/auth/token/accessors/": regexp.MustCompile(`^/auth/token/accessors/?$`), + "/auth/token/accessors/": regexp.MustCompile(`^/auth/token/accessors/$`), "/pki/root": regexp.MustCompile(`^/pki/root$`), "/pki/root/sign-self-issued": regexp.MustCompile(`^/pki/root/sign-self-issued$`), "/sys/audit": regexp.MustCompile(`^/sys/audit$`), @@ -50,10 +47,10 @@ var sudoPaths = map[string]*regexp.Regexp{ "/sys/config/auditing/request-headers": regexp.MustCompile(`^/sys/config/auditing/request-headers$`), "/sys/config/auditing/request-headers/{header}": regexp.MustCompile(`^/sys/config/auditing/request-headers/.+$`), "/sys/config/cors": regexp.MustCompile(`^/sys/config/cors$`), - "/sys/config/ui/headers/": regexp.MustCompile(`^/sys/config/ui/headers/?$`), + "/sys/config/ui/headers/": regexp.MustCompile(`^/sys/config/ui/headers/$`), "/sys/config/ui/headers/{header}": regexp.MustCompile(`^/sys/config/ui/headers/.+$`), "/sys/leases": regexp.MustCompile(`^/sys/leases$`), - "/sys/leases/lookup/": regexp.MustCompile(`^/sys/leases/lookup/?$`), + "/sys/leases/lookup/": regexp.MustCompile(`^/sys/leases/lookup/$`), "/sys/leases/lookup/{prefix}": regexp.MustCompile(`^/sys/leases/lookup/.+$`), "/sys/leases/revoke-force/{prefix}": regexp.MustCompile(`^/sys/leases/revoke-force/.+$`), "/sys/leases/revoke-prefix/{prefix}": regexp.MustCompile(`^/sys/leases/revoke-prefix/.+$`), @@ -73,7 +70,7 @@ var sudoPaths = map[string]*regexp.Regexp{ "/sys/replication/performance/primary/secondary-token": regexp.MustCompile(`^/sys/replication/performance/primary/secondary-token$`), "/sys/replication/primary/secondary-token": regexp.MustCompile(`^/sys/replication/primary/secondary-token$`), "/sys/replication/reindex": regexp.MustCompile(`^/sys/replication/reindex$`), - "/sys/storage/raft/snapshot-auto/config/": regexp.MustCompile(`^/sys/storage/raft/snapshot-auto/config/?$`), + "/sys/storage/raft/snapshot-auto/config/": regexp.MustCompile(`^/sys/storage/raft/snapshot-auto/config/$`), "/sys/storage/raft/snapshot-auto/config/{name}": regexp.MustCompile(`^/sys/storage/raft/snapshot-auto/config/[^/]+$`), } @@ -135,7 +132,7 @@ func VaultPluginTLSProviderContext(ctx context.Context, apiTLSConfig *TLSConfig) return func() (*tls.Config, error) { unwrapToken := os.Getenv(PluginUnwrapTokenEnv) - parsedJWT, err := jwt.ParseSigned(unwrapToken) + parsedJWT, err := squarejwt.ParseSigned(unwrapToken) if err != nil { return nil, errwrap.Wrapf("error parsing wrapping token: {{err}}", err) } diff --git a/api/plugin_helpers_test.go b/api/plugin_helpers_test.go index 7b3ddbf8154af0..453720ea7a5aac 100644 --- a/api/plugin_helpers_test.go +++ b/api/plugin_helpers_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import "testing" diff --git a/api/plugin_types.go b/api/plugin_types.go index 4c759a2decc510..3b85013b7eb443 100644 --- a/api/plugin_types.go +++ b/api/plugin_types.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api // NOTE: this file was copied from diff --git a/api/renewer_test.go b/api/renewer_test.go index 7ba16e66eca24a..3b28d8546d42d6 100644 --- a/api/renewer_test.go +++ b/api/renewer_test.go @@ -1,15 +1,9 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package api import ( "errors" "fmt" - "math/rand" - "reflect" "testing" - "testing/quick" "time" "github.com/go-test/deep" @@ -239,47 +233,3 @@ func TestLifetimeWatcher(t *testing.T) { }) } } - -// TestCalcSleepPeriod uses property based testing to evaluate the calculateSleepDuration -// function of LifeTimeWatchers, but also incidentally tests "calculateGrace". -// This is on account of "calculateSleepDuration" performing the "calculateGrace" -// function in particular instances. -// Both of these functions support the vital functionality of the LifeTimeWatcher -// and therefore should be tested rigorously. -func TestCalcSleepPeriod(t *testing.T) { - c := quick.Config{ - MaxCount: 10000, - Values: func(values []reflect.Value, r *rand.Rand) { - leaseDuration := r.Int63() - priorDuration := r.Int63n(leaseDuration) - remainingLeaseDuration := r.Int63n(priorDuration) - increment := r.Int63n(remainingLeaseDuration) - - values[0] = reflect.ValueOf(r) - values[1] = reflect.ValueOf(time.Duration(leaseDuration)) - values[2] = reflect.ValueOf(time.Duration(priorDuration)) - values[3] = reflect.ValueOf(time.Duration(remainingLeaseDuration)) - values[4] = reflect.ValueOf(time.Duration(increment)) - }, - } - - // tests that "calculateSleepDuration" will always return a value less than - // the remaining lease duration given a random leaseDuration, priorDuration, remainingLeaseDuration, and increment. - // Inputs are generated so that: - // leaseDuration > priorDuration > remainingLeaseDuration - // and remainingLeaseDuration > increment - if err := quick.Check(func(r *rand.Rand, leaseDuration, priorDuration, remainingLeaseDuration, increment time.Duration) bool { - lw := LifetimeWatcher{ - grace: 0, - increment: int(increment.Seconds()), - random: r, - } - - lw.calculateGrace(remainingLeaseDuration, increment) - - // ensure that we sleep for less than the remaining lease. - return lw.calculateSleepDuration(remainingLeaseDuration, priorDuration) < remainingLeaseDuration - }, &c); err != nil { - t.Error(err) - } -} diff --git a/api/replication_status.go b/api/replication_status.go deleted file mode 100644 index 1668daf19c127e..00000000000000 --- a/api/replication_status.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package api - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - - "github.com/mitchellh/mapstructure" -) - -const ( - apiRepPerformanceStatusPath = "/v1/sys/replication/performance/status" - apiRepDRStatusPath = "/v1/sys/replication/dr/status" - apiRepStatusPath = "/v1/sys/replication/status" -) - -type ClusterInfo struct { - APIAddr string `json:"api_address,omitempty" mapstructure:"api_address"` - ClusterAddress string `json:"cluster_address,omitempty" mapstructure:"cluster_address"` - ConnectionStatus string `json:"connection_status,omitempty" mapstructure:"connection_status"` - LastHeartBeat string `json:"last_heartbeat,omitempty" mapstructure:"last_heartbeat"` - NodeID string `json:"node_id,omitempty" mapstructure:"node_id"` -} - -type ReplicationStatusGenericResponse struct { - LastDRWAL uint64 `json:"last_dr_wal,omitempty" mapstructure:"last_dr_wal"` - LastReindexEpoch string `json:"last_reindex_epoch,omitempty" mapstructure:"last_reindex_epoch"` - ClusterID string `json:"cluster_id,omitempty" mapstructure:"cluster_id"` - LastWAL uint64 `json:"last_wal,omitempty" mapstructure:"last_wal"` - MerkleRoot string `json:"merkle_root,omitempty" mapstructure:"merkle_root"` - Mode string `json:"mode,omitempty" mapstructure:"mode"` - PrimaryClusterAddr string `json:"primary_cluster_addr,omitempty" mapstructure:"primary_cluster_addr"` - LastPerformanceWAL uint64 `json:"last_performance_wal,omitempty" mapstructure:"last_performance_wal"` - State string `json:"state,omitempty" mapstructure:"state"` - LastRemoteWAL uint64 `json:"last_remote_wal,omitempty" mapstructure:"last_remote_wal"` - SecondaryID string `json:"secondary_id,omitempty" mapstructure:"secondary_id"` - SSCTGenerationCounter uint64 `json:"ssct_generation_counter,omitempty" mapstructure:"ssct_generation_counter"` - - KnownSecondaries []string `json:"known_secondaries,omitempty" mapstructure:"known_secondaries"` - KnownPrimaryClusterAddrs []string `json:"known_primary_cluster_addrs,omitempty" mapstructure:"known_primary_cluster_addrs"` - Primaries []ClusterInfo `json:"primaries,omitempty" mapstructure:"primaries"` - Secondaries []ClusterInfo `json:"secondaries,omitempty" mapstructure:"secondaries"` -} - -type ReplicationStatusResponse struct { - DR ReplicationStatusGenericResponse `json:"dr,omitempty" mapstructure:"dr"` - Performance ReplicationStatusGenericResponse `json:"performance,omitempty" mapstructure:"performance"` -} - -func (c *Sys) ReplicationStatus() (*ReplicationStatusResponse, error) { - return c.ReplicationStatusWithContext(context.Background(), apiRepStatusPath) -} - -func (c *Sys) ReplicationPerformanceStatusWithContext(ctx context.Context) (*ReplicationStatusGenericResponse, error) { - s, err := c.ReplicationStatusWithContext(ctx, apiRepPerformanceStatusPath) - if err != nil { - return nil, err - } - - return &s.Performance, nil -} - -func (c *Sys) ReplicationDRStatusWithContext(ctx context.Context) (*ReplicationStatusGenericResponse, error) { - s, err := c.ReplicationStatusWithContext(ctx, apiRepDRStatusPath) - if err != nil { - return nil, err - } - - return &s.DR, nil -} - -func (c *Sys) ReplicationStatusWithContext(ctx context.Context, path string) (*ReplicationStatusResponse, error) { - // default to replication/status - if path == "" { - path = apiRepStatusPath - } - - ctx, cancelFunc := c.c.withConfiguredTimeout(ctx) - defer cancelFunc() - - r := c.c.NewRequest(http.MethodGet, path) - - resp, err := c.c.rawRequestWithContext(ctx, r) - if err != nil { - 
return nil, err - } - defer func() { _ = resp.Body.Close() }() - - // First decode response into a map[string]interface{} - data := make(map[string]interface{}) - dec := json.NewDecoder(resp.Body) - dec.UseNumber() - if err := dec.Decode(&data); err != nil { - return nil, err - } - - rawData, ok := data["data"] - if !ok { - return nil, fmt.Errorf("empty data in replication status response") - } - - s := &ReplicationStatusResponse{} - g := &ReplicationStatusGenericResponse{} - switch { - case path == apiRepPerformanceStatusPath: - err = mapstructure.Decode(rawData, g) - if err != nil { - return nil, err - } - s.Performance = *g - case path == apiRepDRStatusPath: - err = mapstructure.Decode(rawData, g) - if err != nil { - return nil, err - } - s.DR = *g - default: - err = mapstructure.Decode(rawData, s) - if err != nil { - return nil, err - } - return s, err - } - - return s, err -} diff --git a/api/request.go b/api/request.go index ecf783701ad4b2..a8e53c01e47112 100644 --- a/api/request.go +++ b/api/request.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/request_test.go b/api/request_test.go index ac21b801987288..f2657e61c50389 100644 --- a/api/request_test.go +++ b/api/request_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/response.go b/api/response.go index 2842c125514a4e..a0e31144e562a1 100644 --- a/api/response.go +++ b/api/response.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/secret.go b/api/secret.go index 3d15f7a806a6e3..2891b11c43fb54 100644 --- a/api/secret.go +++ b/api/secret.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/ssh.go b/api/ssh.go index 28510eecc23f3e..b832e274829097 100644 --- a/api/ssh.go +++ b/api/ssh.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/ssh_agent.go b/api/ssh_agent.go index e61503772fa3cd..c67b80dc4c1efd 100644 --- a/api/ssh_agent.go +++ b/api/ssh_agent.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/ssh_agent_test.go b/api/ssh_agent_test.go index 38117e42a70624..d233b09c476cf7 100644 --- a/api/ssh_agent_test.go +++ b/api/ssh_agent_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys.go b/api/sys.go index 81ebb3a2509f24..5fb111887c0d38 100644 --- a/api/sys.go +++ b/api/sys.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api // Sys is used to perform system-related operations on Vault. diff --git a/api/sys_audit.go b/api/sys_audit.go index 2244087aad5889..82d9aab0b7a013 100644 --- a/api/sys_audit.go +++ b/api/sys_audit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_auth.go b/api/sys_auth.go index e814412191f356..238bd5e468a023 100644 --- a/api/sys_auth.go +++ b/api/sys_auth.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_capabilities.go b/api/sys_capabilities.go index 6310d42fcf46c1..af306a07f3126a 100644 --- a/api/sys_capabilities.go +++ b/api/sys_capabilities.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_config_cors.go b/api/sys_config_cors.go index e80aa9d8b41756..1e2cda4f48cbf7 100644 --- a/api/sys_config_cors.go +++ b/api/sys_config_cors.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_generate_root.go b/api/sys_generate_root.go index da4ad2f9b73ba7..096cadb793d90c 100644 --- a/api/sys_generate_root.go +++ b/api/sys_generate_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_hastatus.go b/api/sys_hastatus.go index 2b2aa7c3e98063..d89d59651a92f8 100644 --- a/api/sys_hastatus.go +++ b/api/sys_hastatus.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_health.go b/api/sys_health.go index 13fd8d4d37432d..953c1c21eaa3cd 100644 --- a/api/sys_health.go +++ b/api/sys_health.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_init.go b/api/sys_init.go index 13fa94806976b6..05dea86f6ab5ce 100644 --- a/api/sys_init.go +++ b/api/sys_init.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_leader.go b/api/sys_leader.go index 868914d3b13943..a74e206ebed45f 100644 --- a/api/sys_leader.go +++ b/api/sys_leader.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_leases.go b/api/sys_leases.go index c46f07e64b41f6..c02402f5314c34 100644 --- a/api/sys_leases.go +++ b/api/sys_leases.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_mfa.go b/api/sys_mfa.go index 2be66958464865..a1ba1bd80f9442 100644 --- a/api/sys_mfa.go +++ b/api/sys_mfa.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_monitor.go b/api/sys_monitor.go index 15a8a13d175c91..405d40f8efcb21 100644 --- a/api/sys_monitor.go +++ b/api/sys_monitor.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_mounts.go b/api/sys_mounts.go index a6c2a0f5412e64..f55133cec4c64a 100644 --- a/api/sys_mounts.go +++ b/api/sys_mounts.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_mounts_test.go b/api/sys_mounts_test.go index a810c6268a1c74..d461a9d495cf82 100644 --- a/api/sys_mounts_test.go +++ b/api/sys_mounts_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_plugins.go b/api/sys_plugins.go index 2ee024d9defc83..05dce293998909 100644 --- a/api/sys_plugins.go +++ b/api/sys_plugins.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_plugins_test.go b/api/sys_plugins_test.go index 3673181472a7d8..98078b7c3aab7e 100644 --- a/api/sys_plugins_test.go +++ b/api/sys_plugins_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_policy.go b/api/sys_policy.go index 9ddffe4ec7c4a2..4a4f91b08c71e9 100644 --- a/api/sys_policy.go +++ b/api/sys_policy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_raft.go b/api/sys_raft.go index 29bfed0f5613b9..7806a1418df832 100644 --- a/api/sys_raft.go +++ b/api/sys_raft.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_rekey.go b/api/sys_rekey.go index 573201751c7b9a..2ac8a4743bcfd0 100644 --- a/api/sys_rekey.go +++ b/api/sys_rekey.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_rotate.go b/api/sys_rotate.go index 295d989f9e2a14..fa86886c35b8eb 100644 --- a/api/sys_rotate.go +++ b/api/sys_rotate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_seal.go b/api/sys_seal.go index 7a9c5621ed194b..0522f2a42b765a 100644 --- a/api/sys_seal.go +++ b/api/sys_seal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/sys_stepdown.go b/api/sys_stepdown.go index c55ed1e666db9d..833f31a6f7602d 100644 --- a/api/sys_stepdown.go +++ b/api/sys_stepdown.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package api import ( diff --git a/api/test-fixtures/agent_config.hcl b/api/test-fixtures/agent_config.hcl index 38d8026057f52c..8339f53d7eaad2 100644 --- a/api/test-fixtures/agent_config.hcl +++ b/api/test-fixtures/agent_config.hcl @@ -1,5 +1,2 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - vault_addr="http://127.0.0.1:8200" ssh_mount_point="ssh" diff --git a/audit/audit.go b/audit/audit.go index 35a3d38a0558d1..5641b449af3090 100644 --- a/audit/audit.go +++ b/audit/audit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/format.go b/audit/format.go index d595f2fd174017..c8b43d52be6638 100644 --- a/audit/format.go +++ b/audit/format.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package audit import ( @@ -11,7 +8,7 @@ import ( "strings" "time" - "github.com/go-jose/go-jose/v3/jwt" + squarejwt "gopkg.in/square/go-jose.v2/jwt" "github.com/hashicorp/vault/helper/namespace" "github.com/hashicorp/vault/sdk/helper/salt" @@ -92,9 +89,9 @@ func (f *AuditFormatter) FormatRequest(ctx context.Context, w io.Writer, config reqType = "request" } reqEntry := &AuditRequestEntry{ - Type: reqType, - Error: errString, - ForwardedFrom: req.ForwardedFrom, + Type: reqType, + Error: errString, + Auth: &AuditAuth{ ClientToken: auth.ClientToken, Accessor: auth.Accessor, @@ -112,18 +109,14 @@ func (f *AuditFormatter) FormatRequest(ctx context.Context, w io.Writer, config }, Request: &AuditRequest{ - ID: req.ID, - ClientID: req.ClientID, - ClientToken: req.ClientToken, - ClientTokenAccessor: req.ClientTokenAccessor, - Operation: req.Operation, - MountPoint: req.MountPoint, - MountType: req.MountType, - MountAccessor: req.MountAccessor, - MountRunningVersion: req.MountRunningVersion(), - MountRunningSha256: req.MountRunningSha256(), - MountIsExternalPlugin: req.MountIsExternalPlugin(), - MountClass: req.MountClass(), + ID: req.ID, + ClientID: req.ClientID, + ClientToken: req.ClientToken, + ClientTokenAccessor: req.ClientTokenAccessor, + Operation: req.Operation, + MountPoint: req.MountPoint, + MountType: req.MountType, + MountAccessor: req.MountAccessor, Namespace: &AuditNamespace{ ID: ns.ID, Path: ns.Path, @@ -297,9 +290,8 @@ func (f *AuditFormatter) FormatResponse(ctx context.Context, w io.Writer, config respType = "response" } respEntry := &AuditResponseEntry{ - Type: respType, - Error: errString, - Forwarded: req.ForwardedFrom != "", + Type: respType, + Error: errString, Auth: &AuditAuth{ ClientToken: auth.ClientToken, Accessor: auth.Accessor, @@ -318,18 +310,14 @@ func (f *AuditFormatter) FormatResponse(ctx context.Context, w io.Writer, config }, Request: &AuditRequest{ - ID: req.ID, - ClientToken: req.ClientToken, - ClientTokenAccessor: req.ClientTokenAccessor, - ClientID: req.ClientID, - Operation: req.Operation, - MountPoint: req.MountPoint, - MountType: req.MountType, - MountAccessor: req.MountAccessor, - MountRunningVersion: req.MountRunningVersion(), - MountRunningSha256: req.MountRunningSha256(), - MountIsExternalPlugin: req.MountIsExternalPlugin(), - MountClass: req.MountClass(), + ID: req.ID, + ClientToken: req.ClientToken, + ClientTokenAccessor: req.ClientTokenAccessor, + ClientID: req.ClientID, + Operation: req.Operation, + MountPoint: req.MountPoint, + MountType: req.MountType, + MountAccessor: req.MountAccessor, Namespace: &AuditNamespace{ ID: ns.ID, Path: ns.Path, @@ -345,20 +333,16 @@ func (f *AuditFormatter) FormatResponse(ctx context.Context, w io.Writer, config }, Response: &AuditResponse{ - MountPoint: req.MountPoint, - MountType: req.MountType, - MountAccessor: req.MountAccessor, - MountRunningVersion: req.MountRunningVersion(), - MountRunningSha256: req.MountRunningSha256(), - MountIsExternalPlugin: req.MountIsExternalPlugin(), - MountClass: req.MountClass(), - Auth: respAuth, - Secret: respSecret, - Data: respData, - Warnings: resp.Warnings, - Redirect: resp.Redirect, - WrapInfo: respWrapInfo, - Headers: resp.Headers, + MountPoint: req.MountPoint, + MountType: req.MountType, + MountAccessor: req.MountAccessor, + Auth: respAuth, + Secret: respSecret, + Data: respData, + Warnings: resp.Warnings, + Redirect: resp.Redirect, + WrapInfo: respWrapInfo, + Headers: resp.Headers, }, } @@ -393,23 +377,21 @@ func (f *AuditFormatter) 
FormatResponse(ctx context.Context, w io.Writer, config // AuditRequestEntry is the structure of a request audit log entry in Audit. type AuditRequestEntry struct { - Time string `json:"time,omitempty"` - Type string `json:"type,omitempty"` - Auth *AuditAuth `json:"auth,omitempty"` - Request *AuditRequest `json:"request,omitempty"` - Error string `json:"error,omitempty"` - ForwardedFrom string `json:"forwarded_from,omitempty"` // Populated in Enterprise when a request is forwarded + Time string `json:"time,omitempty"` + Type string `json:"type,omitempty"` + Auth *AuditAuth `json:"auth,omitempty"` + Request *AuditRequest `json:"request,omitempty"` + Error string `json:"error,omitempty"` } // AuditResponseEntry is the structure of a response audit log entry in Audit. type AuditResponseEntry struct { - Time string `json:"time,omitempty"` - Type string `json:"type,omitempty"` - Auth *AuditAuth `json:"auth,omitempty"` - Request *AuditRequest `json:"request,omitempty"` - Response *AuditResponse `json:"response,omitempty"` - Error string `json:"error,omitempty"` - Forwarded bool `json:"forwarded,omitempty"` + Time string `json:"time,omitempty"` + Type string `json:"type,omitempty"` + Auth *AuditAuth `json:"auth,omitempty"` + Request *AuditRequest `json:"request,omitempty"` + Response *AuditResponse `json:"response,omitempty"` + Error string `json:"error,omitempty"` } type AuditRequest struct { @@ -420,10 +402,6 @@ type AuditRequest struct { MountPoint string `json:"mount_point,omitempty"` MountType string `json:"mount_type,omitempty"` MountAccessor string `json:"mount_accessor,omitempty"` - MountRunningVersion string `json:"mount_running_version,omitempty"` - MountRunningSha256 string `json:"mount_running_sha256,omitempty"` - MountClass string `json:"mount_class,omitempty"` - MountIsExternalPlugin bool `json:"mount_is_external_plugin,omitempty"` ClientToken string `json:"client_token,omitempty"` ClientTokenAccessor string `json:"client_token_accessor,omitempty"` Namespace *AuditNamespace `json:"namespace,omitempty"` @@ -438,20 +416,16 @@ type AuditRequest struct { } type AuditResponse struct { - Auth *AuditAuth `json:"auth,omitempty"` - MountPoint string `json:"mount_point,omitempty"` - MountType string `json:"mount_type,omitempty"` - MountAccessor string `json:"mount_accessor,omitempty"` - MountRunningVersion string `json:"mount_running_plugin_version,omitempty"` - MountRunningSha256 string `json:"mount_running_sha256,omitempty"` - MountClass string `json:"mount_class,omitempty"` - MountIsExternalPlugin bool `json:"mount_is_external_plugin,omitempty"` - Secret *AuditSecret `json:"secret,omitempty"` - Data map[string]interface{} `json:"data,omitempty"` - Warnings []string `json:"warnings,omitempty"` - Redirect string `json:"redirect,omitempty"` - WrapInfo *AuditResponseWrapInfo `json:"wrap_info,omitempty"` - Headers map[string][]string `json:"headers,omitempty"` + Auth *AuditAuth `json:"auth,omitempty"` + MountPoint string `json:"mount_point,omitempty"` + MountType string `json:"mount_type,omitempty"` + MountAccessor string `json:"mount_accessor,omitempty"` + Secret *AuditSecret `json:"secret,omitempty"` + Data map[string]interface{} `json:"data,omitempty"` + Warnings []string `json:"warnings,omitempty"` + Redirect string `json:"redirect,omitempty"` + WrapInfo *AuditResponseWrapInfo `json:"wrap_info,omitempty"` + Headers map[string][]string `json:"headers,omitempty"` } type AuditAuth struct { @@ -535,12 +509,12 @@ func parseVaultTokenFromJWT(token string) *string { return nil } - parsedJWT, err 
:= jwt.ParseSigned(token) + parsedJWT, err := squarejwt.ParseSigned(token) if err != nil { return nil } - var claims jwt.Claims + var claims squarejwt.Claims if err = parsedJWT.UnsafeClaimsWithoutVerification(&claims); err != nil { return nil } diff --git a/audit/format_json.go b/audit/format_json.go index 74f4138184a464..4003c05a721789 100644 --- a/audit/format_json.go +++ b/audit/format_json.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/format_json_test.go b/audit/format_json_test.go index fa31cde83bea60..e4a703d12ad42d 100644 --- a/audit/format_json_test.go +++ b/audit/format_json_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/format_jsonx.go b/audit/format_jsonx.go index 20352a2deadeb3..bff244099a9ab4 100644 --- a/audit/format_jsonx.go +++ b/audit/format_jsonx.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/format_jsonx_test.go b/audit/format_jsonx_test.go index fb6046195187ef..00921c0c71a95d 100644 --- a/audit/format_jsonx_test.go +++ b/audit/format_jsonx_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/format_test.go b/audit/format_test.go index 5395d916cc8164..bc0610c373f8cd 100644 --- a/audit/format_test.go +++ b/audit/format_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/formatter.go b/audit/formatter.go index 98c393c3b8170b..df82057e39b5b4 100644 --- a/audit/formatter.go +++ b/audit/formatter.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/hashstructure.go b/audit/hashstructure.go index cd4f8085d13d32..9040f8c2b5b480 100644 --- a/audit/hashstructure.go +++ b/audit/hashstructure.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/audit/hashstructure_test.go b/audit/hashstructure_test.go index c65931f7c5be27..3b080a5268cbbc 100644 --- a/audit/hashstructure_test.go +++ b/audit/hashstructure_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package audit import ( diff --git a/builtin/audit/file/backend.go b/builtin/audit/file/backend.go index 2c3ef3f8e07743..43dd26c25bd0c6 100644 --- a/builtin/audit/file/backend.go +++ b/builtin/audit/file/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package file import ( diff --git a/builtin/audit/file/backend_test.go b/builtin/audit/file/backend_test.go index ad082ace5d4325..817518c50bd85d 100644 --- a/builtin/audit/file/backend_test.go +++ b/builtin/audit/file/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package file import ( diff --git a/builtin/audit/socket/backend.go b/builtin/audit/socket/backend.go index 4c649e0e9c7485..be6f583fc53945 100644 --- a/builtin/audit/socket/backend.go +++ b/builtin/audit/socket/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
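The hunk above reverts audit/format.go to the older gopkg.in/square/go-jose.v2/jwt package (aliased squarejwt); ParseSigned and UnsafeClaimsWithoutVerification keep the same call shape, so only the import line changes. Below is a minimal, self-contained sketch of that unverified-claims pattern. The helper name unverifiedTokenID, the placeholder token string, and the choice to return the ID ("jti") claim are illustrative assumptions, since the hunk cuts off before the function's return statement.

// NOTE: illustrative sketch, not part of the patch.
package main

import (
	"fmt"
	"strings"

	squarejwt "gopkg.in/square/go-jose.v2/jwt"
)

// unverifiedTokenID parses a compact JWT and reads its registered claims
// without verifying the signature, using the same squarejwt calls as
// parseVaultTokenFromJWT above. Returning the ID ("jti") claim is an
// assumption made for this sketch.
func unverifiedTokenID(token string) *string {
	if strings.Count(token, ".") != 2 {
		return nil // not a compact JWS/JWT
	}
	parsed, err := squarejwt.ParseSigned(token)
	if err != nil {
		return nil
	}
	var claims squarejwt.Claims
	// The signature is deliberately not checked; only claim values are read.
	if err := parsed.UnsafeClaimsWithoutVerification(&claims); err != nil {
		return nil
	}
	return &claims.ID
}

func main() {
	// Placeholder token string; a real JWT would yield a claim value.
	if id := unverifiedTokenID("header.payload.signature"); id != nil {
		fmt.Println("jti:", *id)
	}
}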
-// SPDX-License-Identifier: MPL-2.0 - package socket import ( diff --git a/builtin/audit/syslog/backend.go b/builtin/audit/syslog/backend.go index 2da92fe2e40e5b..1d3bce893cca4c 100644 --- a/builtin/audit/syslog/backend.go +++ b/builtin/audit/syslog/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package syslog import ( diff --git a/builtin/credential/approle/backend.go b/builtin/credential/approle/backend.go index 4165fbbec47eff..ebd8d3c06a8010 100644 --- a/builtin/credential/approle/backend.go +++ b/builtin/credential/approle/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -15,7 +12,6 @@ import ( ) const ( - operationPrefixAppRole = "app-role" secretIDPrefix = "secret_id/" secretIDLocalPrefix = "secret_id_local/" secretIDAccessorPrefix = "accessor/" diff --git a/builtin/credential/approle/backend_test.go b/builtin/credential/approle/backend_test.go index 683249e316dc64..212fe36f0f7c64 100644 --- a/builtin/credential/approle/backend_test.go +++ b/builtin/credential/approle/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( diff --git a/builtin/credential/approle/cmd/approle/main.go b/builtin/credential/approle/cmd/approle/main.go index 9000ea95810a97..22fa242fa623de 100644 --- a/builtin/credential/approle/cmd/approle/main.go +++ b/builtin/credential/approle/cmd/approle/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: approle.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/approle/path_login.go b/builtin/credential/approle/path_login.go index a4c4fe82cde9a7..489ff638e2e4a3 100644 --- a/builtin/credential/approle/path_login.go +++ b/builtin/credential/approle/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -19,10 +16,6 @@ import ( func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: "login$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationVerb: "login", - }, Fields: map[string]*framework.FieldSchema{ "role_id": { Type: framework.TypeString, diff --git a/builtin/credential/approle/path_login_test.go b/builtin/credential/approle/path_login_test.go index 5a09c6c4e3f423..9a7f57af4d8563 100644 --- a/builtin/credential/approle/path_login_test.go +++ b/builtin/credential/approle/path_login_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
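With the cmd/approle/main.go hunk above, the standalone AppRole plugin binary is served through plugin.Serve again rather than plugin.ServeMultiplex. The following is a sketch of the resulting main(), pieced together from the lines shown in this patch; the import paths and the flag-parsing lines that the hunk does not display are assumptions.

// NOTE: illustrative reconstruction of the post-patch main(); import paths
// and the flag-parsing lines not visible in the hunk are assumptions.
package main

import (
	"os"

	hclog "github.com/hashicorp/go-hclog"
	"github.com/hashicorp/vault/api"
	"github.com/hashicorp/vault/builtin/credential/approle"
	"github.com/hashicorp/vault/sdk/plugin"
)

func main() {
	apiClientMeta := &api.PluginAPIClientMeta{}
	flags := apiClientMeta.FlagSet()
	flags.Parse(os.Args[1:])

	tlsConfig := apiClientMeta.GetTLSConfig()
	tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig)

	// plugin.Serve registers a single backend; the TLSProviderFunc keeps
	// compatibility with Vault versions that do not negotiate AutoMTLS.
	if err := plugin.Serve(&plugin.ServeOpts{
		BackendFactoryFunc: approle.Factory,
		TLSProviderFunc:    tlsProviderFunc,
	}); err != nil {
		logger := hclog.New(&hclog.LoggerOptions{})
		logger.Error("plugin shutting down", "error", err)
		os.Exit(1)
	}
}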
-// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -9,6 +6,8 @@ import ( "testing" "time" + "github.com/hashicorp/vault/sdk/framework" + "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" "github.com/hashicorp/vault/sdk/logical" ) @@ -17,9 +16,11 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { var err error b, s := createBackendWithStorage(t) + paths := []*framework.Path{pathLogin(b)} + // Create a role with secret ID binding disabled and only bound cidr list // enabled - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.CreateOperation, Data: map[string]interface{}{ @@ -29,18 +30,24 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { }, Storage: s, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } // Read the role ID - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole/role-id", Operation: logical.ReadOperation, Storage: s, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } roleID := resp.Data["role_id"] // Fill in the connection information and login with just the role ID - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "login", Operation: logical.UpdateOperation, Data: map[string]interface{}{ @@ -49,7 +56,9 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { Storage: s, Connection: &logical.Connection{RemoteAddr: "127.0.0.1"}, }) - + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } if resp.Auth == nil { t.Fatal("expected login to succeed") } @@ -59,9 +68,15 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { if resp.Auth.BoundCIDRs[0].String() != "10.0.0.0/8" { t.Fatalf("bad: %s", resp.Auth.BoundCIDRs[0].String()) } + schema.ValidateResponse( + t, + schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), + resp, + true, + ) // Override with a secret-id value, verify it doesn't pass - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.UpdateOperation, Data: map[string]interface{}{ @@ -69,6 +84,9 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { }, Storage: s, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } roleSecretIDReq := &logical.Request{ Operation: logical.UpdateOperation, @@ -84,11 +102,13 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { } roleSecretIDReq.Data["token_bound_cidrs"] = "10.0.0.0/24" - resp = b.requestNoErr(t, roleSecretIDReq) - + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } secretID := resp.Data["secret_id"] - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "login", Operation: logical.UpdateOperation, Data: map[string]interface{}{ @@ -98,7 +118,9 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) { Storage: s, Connection: &logical.Connection{RemoteAddr: "127.0.0.1"}, }) - + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } if resp.Auth == nil { t.Fatal("expected login to succeed") } @@ -108,6 +130,12 @@ func TestAppRole_BoundCIDRLogin(t *testing.T) 
{ if resp.Auth.BoundCIDRs[0].String() != "10.0.0.0/24" { t.Fatalf("bad: %s", resp.Auth.BoundCIDRs[0].String()) } + schema.ValidateResponse( + t, + schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), + resp, + true, + ) } func TestAppRole_RoleLogin(t *testing.T) { @@ -115,14 +143,18 @@ func TestAppRole_RoleLogin(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := []*framework.Path{pathLogin(b)} + createRole(t, b, storage, "role1", "a,b,c") roleRoleIDReq := &logical.Request{ Operation: logical.ReadOperation, Path: "role/role1/role-id", Storage: storage, } - resp = b.requestNoErr(t, roleRoleIDReq) - + resp, err = b.HandleRequest(context.Background(), roleRoleIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } roleID := resp.Data["role_id"] roleSecretIDReq := &logical.Request{ @@ -130,8 +162,10 @@ func TestAppRole_RoleLogin(t *testing.T) { Path: "role/role1/secret-id", Storage: storage, } - resp = b.requestNoErr(t, roleSecretIDReq) - + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } secretID := resp.Data["secret_id"] loginData := map[string]interface{}{ @@ -172,6 +206,13 @@ func TestAppRole_RoleLogin(t *testing.T) { t.Fatalf("expected metadata.alias.role_name to equal 'role1', got: %v", val) } + schema.ValidateResponse( + t, + schema.FindResponseSchema(t, paths, 0, loginReq.Operation), + resp, + true, + ) + // Test renewal renewReq := generateRenewRequest(storage, loginResp.Auth) @@ -200,15 +241,20 @@ func TestAppRole_RoleLogin(t *testing.T) { Storage: storage, Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } roleRoleIDReq = &logical.Request{ Operation: logical.ReadOperation, Path: "role/role-period/role-id", Storage: storage, } - resp = b.requestNoErr(t, roleRoleIDReq) - + resp, err = b.HandleRequest(context.Background(), roleRoleIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } roleID = resp.Data["role_id"] roleSecretIDReq = &logical.Request{ @@ -216,8 +262,10 @@ func TestAppRole_RoleLogin(t *testing.T) { Path: "role/role-period/secret-id", Storage: storage, } - resp = b.requestNoErr(t, roleSecretIDReq) - + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } secretID = resp.Data["secret_id"] loginData["role_id"] = roleID @@ -280,8 +328,12 @@ func generateRenewRequest(s logical.Storage, auth *logical.Auth) *logical.Reques } func TestAppRole_RoleResolve(t *testing.T) { + var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := []*framework.Path{pathLogin(b)} + role := "role1" createRole(t, b, storage, role, "a,b,c") roleRoleIDReq := &logical.Request{ @@ -289,8 +341,10 @@ func TestAppRole_RoleResolve(t *testing.T) { Path: "role/role1/role-id", Storage: storage, } - resp := b.requestNoErr(t, roleRoleIDReq) - + resp, err = b.HandleRequest(context.Background(), roleRoleIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } roleID := resp.Data["role_id"] roleSecretIDReq := &logical.Request{ @@ -298,8 +352,10 @@ func TestAppRole_RoleResolve(t *testing.T) { Path: 
"role/role1/secret-id", Storage: storage, } - resp = b.requestNoErr(t, roleSecretIDReq) - + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } secretID := resp.Data["secret_id"] loginData := map[string]interface{}{ @@ -316,11 +372,21 @@ func TestAppRole_RoleResolve(t *testing.T) { }, } - resp = b.requestNoErr(t, loginReq) + resp, err = b.HandleRequest(context.Background(), loginReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } if resp.Data["role"] != role { t.Fatalf("Role was not as expected. Expected %s, received %s", role, resp.Data["role"]) } + + schema.ValidateResponse( + t, + schema.FindResponseSchema(t, paths, 0, loginReq.Operation), + resp, + true, + ) } func TestAppRole_RoleDoesNotExist(t *testing.T) { diff --git a/builtin/credential/approle/path_role.go b/builtin/credential/approle/path_role.go index 112d2e0f13d973..7759c07037bb15 100644 --- a/builtin/credential/approle/path_role.go +++ b/builtin/credential/approle/path_role.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -124,10 +121,6 @@ func rolePaths(b *backend) []*framework.Path { p := &framework.Path{ Pattern: "role/" + framework.GenericNameRegex("role_name"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "role", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -304,10 +297,6 @@ can only be set during role creation and once set, it can't be reset later.`, p, { Pattern: "role/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "roles", - }, Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathRoleList, @@ -329,10 +318,6 @@ can only be set during role creation and once set, it can't be reset later.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/local-secret-ids$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "local-secret-ids", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -361,10 +346,6 @@ can only be set during role creation and once set, it can't be reset later.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/policies$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "policies", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -416,10 +397,6 @@ can only be set during role creation and once set, it can't be reset later.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/bound-cidr-list$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "bound-cidr-list", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -462,10 +439,6 @@ of CIDR blocks. 
If set, specifies the blocks of IP addresses which can perform t }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id-bound-cidrs$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id-bound-cidrs", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -507,10 +480,6 @@ IP addresses which can perform the login operation.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/token-bound-cidrs$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "token-bound-cidrs", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -551,10 +520,6 @@ IP addresses which can perform the login operation.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/bind-secret-id$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "bind-secret-id", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -596,10 +561,6 @@ IP addresses which can perform the login operation.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id-num-uses$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id-num-uses", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -640,10 +601,6 @@ IP addresses which can perform the login operation.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id-ttl$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id-ttl", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -685,10 +642,6 @@ to 0, meaning no expiration.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/period$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "period", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -740,10 +693,6 @@ to 0, meaning no expiration.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/token-num-uses$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "token-num-uses", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -784,10 +733,6 @@ to 0, meaning no expiration.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/token-ttl$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "token-ttl", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -828,10 +773,6 @@ to 0, meaning no expiration.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/token-max-ttl$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "token-max-ttl", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -872,10 +813,6 @@ to 0, meaning no expiration.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/role-id$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "role-id", - }, 
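The test hunks in this patch all follow one recurring idiom: the b.requestNoErr helper is swapped back to an explicit b.HandleRequest call, an error/IsError check, and a schema.ValidateResponse assertion against the backend's path list. The sketch below condenses that idiom into one hypothetical test; it assumes the approle package's existing helpers (createBackendWithStorage, createRole, rolePaths) and is not itself part of the diff. The integer passed to schema.FindResponseSchema picks a path by its position in rolePaths(b): in these tests, 0 is the role/<role_name> path and 15 is the .../secret-id path.

// NOTE: condensed, illustrative sketch of the test idiom used throughout
// this patch; the test name is made up and the file is not part of the diff.
package approle

import (
	"context"
	"testing"

	"github.com/hashicorp/vault/sdk/helper/testhelpers/schema"
	"github.com/hashicorp/vault/sdk/logical"
)

func TestAppRole_ExampleSecretIDIssue(t *testing.T) {
	b, storage := createBackendWithStorage(t)
	paths := rolePaths(b)

	createRole(t, b, storage, "role1", "a,b")

	// Explicit HandleRequest plus an error / IsError check replaces the
	// b.requestNoErr helper everywhere in these hunks.
	resp, err := b.HandleRequest(context.Background(), &logical.Request{
		Operation: logical.UpdateOperation,
		Path:      "role/role1/secret-id",
		Storage:   storage,
	})
	if err != nil || (resp != nil && resp.IsError()) {
		t.Fatalf("err:%v resp:%#v", err, resp)
	}

	// Each successful response is then checked against the response schema
	// declared on the matching path; index 15 selects the .../secret-id
	// path within rolePaths(b) in these tests.
	schema.ValidateResponse(
		t,
		schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation),
		resp,
		true,
	)
}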
Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -912,10 +849,6 @@ to 0, meaning no expiration.`, }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -981,9 +914,6 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's }, logical.ListOperation: &framework.PathOperation{ Callback: b.pathRoleSecretIDList, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "secret-ids", - }, Responses: map[int][]framework.Response{ http.StatusOK: {{ Description: "OK", @@ -1002,11 +932,6 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id/lookup/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id", - OperationVerb: "look-up", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -1075,10 +1000,6 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id/destroy/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationVerb: "destroy", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -1093,16 +1014,10 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathRoleSecretIDDestroyUpdateDelete, Responses: responseNoContent, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "secret-id", - }, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathRoleSecretIDDestroyUpdateDelete, Responses: responseNoContent, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "secret-id2", - }, }, }, HelpSynopsis: strings.TrimSpace(roleHelp["role-secret-id-destroy"][0]), @@ -1110,11 +1025,6 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id-accessor/lookup/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id-by-accessor", - OperationVerb: "look-up", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -1183,10 +1093,6 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/secret-id-accessor/destroy/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationVerb: "destroy", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, @@ -1201,16 +1107,10 @@ Overrides secret_id_ttl role option when supplied. 
May not be longer than role's logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathRoleSecretIDAccessorDestroyUpdateDelete, Responses: responseNoContent, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "secret-id-by-accessor", - }, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathRoleSecretIDAccessorDestroyUpdateDelete, Responses: responseNoContent, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "secret-id-by-accessor2", - }, }, }, HelpSynopsis: strings.TrimSpace(roleHelp["role-secret-id-accessor"][0]), @@ -1218,10 +1118,6 @@ Overrides secret_id_ttl role option when supplied. May not be longer than role's }, { Pattern: "role/" + framework.GenericNameRegex("role_name") + "/custom-secret-id$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "custom-secret-id", - }, Fields: map[string]*framework.FieldSchema{ "role_name": { Type: framework.TypeString, diff --git a/builtin/credential/approle/path_role_test.go b/builtin/credential/approle/path_role_test.go index a5ea9d1d515998..d6ceed858154d3 100644 --- a/builtin/credential/approle/path_role_test.go +++ b/builtin/credential/approle/path_role_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -58,10 +55,15 @@ func TestAppRole_LocalSecretIDsRead(t *testing.T) { } func TestAppRole_LocalNonLocalSecretIDs(t *testing.T) { + var resp *logical.Response + var err error + b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + // Create a role with local_secret_ids set - resp := b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole1", Operation: logical.CreateOperation, Storage: storage, @@ -71,9 +73,13 @@ func TestAppRole_LocalNonLocalSecretIDs(t *testing.T) { "local_secret_ids": true, }, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\n resp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) // Create another role without setting local_secret_ids - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole2", Operation: logical.CreateOperation, Storage: storage, @@ -82,46 +88,64 @@ func TestAppRole_LocalNonLocalSecretIDs(t *testing.T) { "bind_secret_id": true, }, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\n resp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) count := 10 // Create secret IDs on testrole1 for i := 0; i < count; i++ { - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole1/secret-id", Operation: logical.UpdateOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) } // Check the number of secret IDs generated - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole1/secret-id", Operation: logical.ListOperation, Storage: storage, }) - + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: 
%#v\nerr: %v", resp, err) + } if len(resp.Data["keys"].([]string)) != count { t.Fatalf("failed to list secret IDs") } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.ListOperation), resp, true) // Create secret IDs on testrole1 for i := 0; i < count; i++ { - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole2/secret-id", Operation: logical.UpdateOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) } - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole2/secret-id", Operation: logical.ListOperation, Storage: storage, }) - + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } if len(resp.Data["keys"].([]string)) != count { t.Fatalf("failed to list secret IDs") } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.ListOperation), resp, true) } func TestAppRole_UpgradeSecretIDPrefix(t *testing.T) { @@ -130,6 +154,8 @@ func TestAppRole_UpgradeSecretIDPrefix(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + // Create a role entry directly in storage without SecretIDPrefix err = b.setRoleEntry(context.Background(), storage, "testrole", &roleStorageEntry{ RoleID: "testroleid", @@ -152,16 +178,19 @@ func TestAppRole_UpgradeSecretIDPrefix(t *testing.T) { } // Ensure that the API response contains local_secret_ids - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.ReadOperation, Storage: storage, }) - + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\n resp: %#v", err, resp) + } _, ok := resp.Data["local_secret_ids"] if !ok { t.Fatalf("expected local_secret_ids to be present in the response") } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) } func TestAppRole_LocalSecretIDImmutability(t *testing.T) { @@ -170,6 +199,8 @@ func TestAppRole_LocalSecretIDImmutability(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": []string{"default"}, "bind_secret_id": true, @@ -178,12 +209,16 @@ func TestAppRole_LocalSecretIDImmutability(t *testing.T) { } // Create a role with local_secret_ids set - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.CreateOperation, Storage: storage, Data: roleData, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\nresp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) // Attempt to modify local_secret_ids should fail resp, err = b.HandleRequest(context.Background(), &logical.Request{ @@ -206,6 +241,8 @@ func TestAppRole_UpgradeBoundCIDRList(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": []string{"default"}, "bind_secret_id": true, @@ -213,19 +250,27 @@ func TestAppRole_UpgradeBoundCIDRList(t *testing.T) { } // Create a role with 
bound_cidr_list set - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.CreateOperation, Storage: storage, Data: roleData, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\nresp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) // Read the role and check that the bound_cidr_list is set properly - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.ReadOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\nresp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected := []string{"127.0.0.1/18", "192.178.1.2/24"} actual := resp.Data["secret_id_bound_cidrs"].([]string) @@ -249,18 +294,22 @@ func TestAppRole_UpgradeBoundCIDRList(t *testing.T) { } // Read the role. The upgrade code should have migrated the old type to the new type - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole", Operation: logical.ReadOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\nresp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) if !reflect.DeepEqual(expected, actual) { t.Fatalf("bad: bound_cidr_list; expected: %#v\nactual: %#v\n", expected, actual) } // Create a secret-id by supplying a subset of the role's CIDR blocks with the new type - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole/secret-id", Operation: logical.UpdateOperation, Storage: storage, @@ -268,13 +317,17 @@ func TestAppRole_UpgradeBoundCIDRList(t *testing.T) { "cidr_list": []string{"127.0.0.1/24"}, }, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\nresp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) if resp.Data["secret_id"].(string) == "" { t.Fatalf("failed to generate secret-id") } // Check that the backwards compatibility for the string type is not broken - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrole/secret-id", Operation: logical.UpdateOperation, Storage: storage, @@ -282,6 +335,10 @@ func TestAppRole_UpgradeBoundCIDRList(t *testing.T) { "cidr_list": "127.0.0.1/24", }, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: err: %v\nresp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) if resp.Data["secret_id"].(string) == "" { t.Fatalf("failed to generate secret-id") @@ -295,6 +352,8 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + // Save a role with out LowerCaseRoleName set role := &roleStorageEntry{ RoleID: "testroleid", @@ -313,13 +372,17 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { Operation: logical.UpdateOperation, Storage: storage, } - resp = b.requestNoErr(t, secretIDReq) + resp, err = 
b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) secretID = resp.Data["secret_id"].(string) roleID = "testroleid" // Regular login flow. This should succeed. - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "login", Operation: logical.UpdateOperation, Storage: storage, @@ -328,10 +391,17 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { "secret_id": secretID, }, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } // Lower case the role name when generating the secret id secretIDReq.Path = "role/testrolename/secret-id" - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) secretID = resp.Data["secret_id"].(string) @@ -355,11 +425,15 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { // Delete the role and create it again. This time don't directly persist // it, but route the request to the creation handler so that it sets the // LowerCaseRoleName to true. - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testRoleName", Operation: logical.DeleteOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.DeleteOperation), resp, true) roleReq := &logical.Request{ Path: "role/testRoleName", @@ -369,27 +443,39 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { "bind_secret_id": true, }, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) // Create secret id with lower cased role name - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrolename/secret-id", Operation: logical.UpdateOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) secretID = resp.Data["secret_id"].(string) - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrolename/role-id", Operation: logical.ReadOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.ReadOperation), resp, true) roleID = resp.Data["role_id"].(string) // Login should pass - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "login", Operation: logical.UpdateOperation, Storage: storage, @@ -398,9 
+484,12 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { "secret_id": secretID, }, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr:%v", resp, err) + } // Lookup of secret ID should work in case-insensitive manner - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrolename/secret-id/lookup", Operation: logical.UpdateOperation, Storage: storage, @@ -408,17 +497,24 @@ func TestAppRole_RoleNameLowerCasing(t *testing.T) { "secret_id": secretID, }, }) - + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } if resp == nil { t.Fatalf("failed to lookup secret IDs") } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 16, logical.UpdateOperation), resp, true) // Listing of secret IDs should work in case-insensitive manner - resp = b.requestNoErr(t, &logical.Request{ + resp, err = b.HandleRequest(context.Background(), &logical.Request{ Path: "role/testrolename/secret-id", Operation: logical.ListOperation, Storage: storage, }) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\nerr: %v", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.ListOperation), resp, true) if len(resp.Data["keys"].([]string)) != 1 { t.Fatalf("failed to list secret IDs") @@ -431,6 +527,8 @@ func TestAppRole_RoleReadSetIndex(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleReq := &logical.Request{ Path: "role/testrole", Operation: logical.CreateOperation, @@ -441,7 +539,11 @@ func TestAppRole_RoleReadSetIndex(t *testing.T) { } // Create a role - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\n err: %v\n", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleIDReq := &logical.Request{ Path: "role/testrole/role-id", @@ -450,7 +552,11 @@ func TestAppRole_RoleReadSetIndex(t *testing.T) { } // Get the role ID - resp = b.requestNoErr(t, roleIDReq) + resp, err = b.HandleRequest(context.Background(), roleIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\n err: %v\n", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.ReadOperation), resp, true) roleID := resp.Data["role_id"].(string) @@ -462,7 +568,10 @@ func TestAppRole_RoleReadSetIndex(t *testing.T) { // Read the role again. 
This should add the index and return a warning roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\n err: %v\n", resp, err) + } // Check if the warning is being returned if !strings.Contains(resp.Warnings[0], "Role identifier was missing an index back to role name.") { @@ -487,10 +596,18 @@ func TestAppRole_RoleReadSetIndex(t *testing.T) { // Check if updating and reading of roles work and that there are no lock // contentions dangling due to previous operation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\n err: %v\n", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("bad: resp: %#v\n err: %v\n", resp, err) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) } func TestAppRole_CIDRSubset(t *testing.T) { @@ -499,6 +616,8 @@ func TestAppRole_CIDRSubset(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "role_id": "role-id-123", "policies": "a,b", @@ -512,7 +631,11 @@ func TestAppRole_CIDRSubset(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err: %v resp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) secretIDData := map[string]interface{}{ "cidr_list": "127.0.0.1/16", @@ -534,10 +657,21 @@ func TestAppRole_CIDRSubset(t *testing.T) { roleData["bound_cidr_list"] = "192.168.27.29/16,172.245.30.40/24,10.20.30.40/30" roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err: %v resp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) secretIDData["cidr_list"] = "192.168.27.29/20,172.245.30.40/25,10.20.30.40/32" - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil { + t.Fatal(err) + } + if resp != nil && resp.IsError() { + t.Fatalf("resp: %#v", resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) } func TestAppRole_TokenBoundCIDRSubset32Mask(t *testing.T) { @@ -546,6 +680,8 @@ func TestAppRole_TokenBoundCIDRSubset32Mask(t *testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "role_id": "role-id-123", "policies": "a,b", @@ -559,7 +695,11 @@ func TestAppRole_TokenBoundCIDRSubset32Mask(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err: %v resp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, 
paths, 0, logical.CreateOperation), resp, true) secretIDData := map[string]interface{}{ "token_bound_cidrs": "127.0.0.1/32", @@ -571,7 +711,11 @@ func TestAppRole_TokenBoundCIDRSubset32Mask(t *testing.T) { Data: secretIDData, } - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil { + t.Fatalf("err: %v resp: %#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) secretIDData = map[string]interface{}{ "token_bound_cidrs": "127.0.0.1/24", @@ -598,6 +742,8 @@ func TestAppRole_RoleConstraints(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "role_id": "role-id-123", "policies": "a,b", @@ -611,13 +757,21 @@ func TestAppRole_RoleConstraints(t *testing.T) { } // Set bind_secret_id, which is enabled by default - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) // Set bound_cidr_list alone by explicitly disabling bind_secret_id roleReq.Operation = logical.UpdateOperation roleData["bind_secret_id"] = false roleData["bound_cidr_list"] = "0.0.0.0/0" - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) // Remove both constraints roleReq.Operation = logical.UpdateOperation @@ -630,12 +784,16 @@ func TestAppRole_RoleConstraints(t *testing.T) { if err == nil { t.Fatalf("expected an error") } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) } func TestAppRole_RoleIDUpdate(t *testing.T) { var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "role_id": "role-id-123", "policies": "a,b", @@ -650,7 +808,11 @@ func TestAppRole_RoleIDUpdate(t *testing.T) { Storage: storage, Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleIDUpdateReq := &logical.Request{ Operation: logical.UpdateOperation, @@ -660,14 +822,22 @@ func TestAppRole_RoleIDUpdate(t *testing.T) { "role_id": "customroleid", }, } - resp = b.requestNoErr(t, roleIDUpdateReq) + resp, err = b.HandleRequest(context.Background(), roleIDUpdateReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.UpdateOperation), resp, true) secretIDReq := &logical.Request{ Operation: logical.UpdateOperation, Storage: storage, Path: "role/testrole1/secret-id", } - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, 
logical.UpdateOperation), resp, true) secretID := resp.Data["secret_id"].(string) @@ -684,7 +854,10 @@ func TestAppRole_RoleIDUpdate(t *testing.T) { RemoteAddr: "127.0.0.1", }, } - resp = b.requestNoErr(t, loginReq) + resp, err = b.HandleRequest(context.Background(), loginReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } if resp.Auth == nil { t.Fatalf("expected a non-nil auth object in the response") @@ -696,6 +869,8 @@ func TestAppRole_RoleIDUniqueness(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "role_id": "role-id-123", "policies": "a,b", @@ -711,7 +886,11 @@ func TestAppRole_RoleIDUniqueness(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleReq.Path = "role/testrole2" resp, err = b.HandleRequest(context.Background(), roleReq) @@ -720,7 +899,11 @@ func TestAppRole_RoleIDUniqueness(t *testing.T) { } roleData["role_id"] = "role-id-456" - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleReq.Operation = logical.UpdateOperation roleData["role_id"] = "role-id-123" @@ -728,6 +911,7 @@ func TestAppRole_RoleIDUniqueness(t *testing.T) { if err == nil && !(resp != nil && resp.IsError()) { t.Fatalf("expected an error: got resp:%#v", resp) } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) roleReq.Path = "role/testrole1" roleData["role_id"] = "role-id-456" @@ -758,17 +942,28 @@ func TestAppRole_RoleIDUniqueness(t *testing.T) { } roleIDData["role_id"] = "role-id-2000" - resp = b.requestNoErr(t, roleIDReq) + resp, err = b.HandleRequest(context.Background(), roleIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.UpdateOperation), resp, true) roleIDData["role_id"] = "role-id-1000" roleIDReq.Path = "role/testrole1/role-id" - resp = b.requestNoErr(t, roleIDReq) + resp, err = b.HandleRequest(context.Background(), roleIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.UpdateOperation), resp, true) } func TestAppRole_RoleDeleteSecretID(t *testing.T) { var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + createRole(t, b, storage, "role1", "a,b") secretIDReq := &logical.Request{ Operation: logical.UpdateOperation, @@ -776,16 +971,34 @@ func TestAppRole_RoleDeleteSecretID(t *testing.T) { Path: "role/role1/secret-id", } // Create 3 secrets on the role - resp = b.requestNoErr(t, secretIDReq) - resp = b.requestNoErr(t, secretIDReq) - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, 
schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) + + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) + + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) listReq := &logical.Request{ Operation: logical.ListOperation, Storage: storage, Path: "role/role1/secret-id", } - resp = b.requestNoErr(t, listReq) + resp, err = b.HandleRequest(context.Background(), listReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.ListOperation), resp, true) secretIDAccessors := resp.Data["keys"].([]string) if len(secretIDAccessors) != 3 { @@ -797,9 +1010,13 @@ func TestAppRole_RoleDeleteSecretID(t *testing.T) { Storage: storage, Path: "role/role1", } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.DeleteOperation), resp, true) - resp, err := b.HandleRequest(context.Background(), listReq) + resp, err = b.HandleRequest(context.Background(), listReq) if err != nil || resp == nil || (resp != nil && !resp.IsError()) { t.Fatalf("expected an error. err:%v resp:%#v", err, resp) } @@ -810,13 +1027,19 @@ func TestAppRole_RoleSecretIDReadDelete(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + createRole(t, b, storage, "role1", "a,b") secretIDCreateReq := &logical.Request{ Operation: logical.UpdateOperation, Storage: storage, Path: "role/role1/secret-id", } - resp = b.requestNoErr(t, secretIDCreateReq) + resp, err = b.HandleRequest(context.Background(), secretIDCreateReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) secretID := resp.Data["secret_id"].(string) if secretID == "" { @@ -831,7 +1054,11 @@ func TestAppRole_RoleSecretIDReadDelete(t *testing.T) { "secret_id": secretID, }, } - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 16, logical.UpdateOperation), resp, true) if resp.Data == nil { t.Fatal(err) @@ -845,7 +1072,12 @@ func TestAppRole_RoleSecretIDReadDelete(t *testing.T) { "secret_id": secretID, }, } - resp = b.requestNoErr(t, deleteSecretIDReq) + resp, err = b.HandleRequest(context.Background(), deleteSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 17, logical.DeleteOperation), resp, true) + resp, err = b.HandleRequest(context.Background(), secretIDReq) if resp != nil && resp.IsError() { t.Fatalf("error response:%#v", resp) @@ -860,20 +1092,30 @@ func 
TestAppRole_RoleSecretIDAccessorReadDelete(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + createRole(t, b, storage, "role1", "a,b") secretIDReq := &logical.Request{ Operation: logical.UpdateOperation, Storage: storage, Path: "role/role1/secret-id", } - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) listReq := &logical.Request{ Operation: logical.ListOperation, Storage: storage, Path: "role/role1/secret-id", } - resp = b.requestNoErr(t, listReq) + resp, err = b.HandleRequest(context.Background(), listReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.ListOperation), resp, true) hmacSecretID := resp.Data["keys"].([]string)[0] @@ -885,14 +1127,21 @@ func TestAppRole_RoleSecretIDAccessorReadDelete(t *testing.T) { "secret_id_accessor": hmacSecretID, }, } - resp = b.requestNoErr(t, hmacReq) - + resp, err = b.HandleRequest(context.Background(), hmacReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } if resp.Data == nil { t.Fatal(err) } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 18, logical.UpdateOperation), resp, true) hmacReq.Path = "role/role1/secret-id-accessor/destroy" - resp = b.requestNoErr(t, hmacReq) + resp, err = b.HandleRequest(context.Background(), hmacReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 19, logical.UpdateOperation), resp, true) hmacReq.Operation = logical.ReadOperation resp, err = b.HandleRequest(context.Background(), hmacReq) @@ -934,8 +1183,11 @@ func TestAppRoleSecretIDLookup(t *testing.T) { func TestAppRoleRoleListSecretID(t *testing.T) { var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + createRole(t, b, storage, "role1", "a,b") secretIDReq := &logical.Request{ @@ -944,18 +1196,46 @@ func TestAppRoleRoleListSecretID(t *testing.T) { Path: "role/role1/secret-id", } // Create 5 'secret_id's - resp = b.requestNoErr(t, secretIDReq) - resp = b.requestNoErr(t, secretIDReq) - resp = b.requestNoErr(t, secretIDReq) - resp = b.requestNoErr(t, secretIDReq) - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) + + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) + + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) + + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && 
resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) + + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) listReq := &logical.Request{ Operation: logical.ListOperation, Storage: storage, Path: "role/role1/secret-id/", } - resp = b.requestNoErr(t, listReq) + resp, err = b.HandleRequest(context.Background(), listReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.ListOperation), resp, true) secrets := resp.Data["keys"].([]string) if len(secrets) != 5 { @@ -965,8 +1245,11 @@ func TestAppRoleRoleListSecretID(t *testing.T) { func TestAppRole_RoleList(t *testing.T) { var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + createRole(t, b, storage, "role1", "a,b") createRole(t, b, storage, "role2", "c,d") createRole(t, b, storage, "role3", "e,f") @@ -978,7 +1261,11 @@ func TestAppRole_RoleList(t *testing.T) { Path: "role", Storage: storage, } - resp = b.requestNoErr(t, listReq) + resp, err = b.HandleRequest(context.Background(), listReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 1, logical.ListOperation), resp, true) actual := resp.Data["keys"].([]string) expected := []string{"role1", "role2", "role3", "role4", "role5"} @@ -989,8 +1276,11 @@ func TestAppRole_RoleList(t *testing.T) { func TestAppRole_RoleSecretIDWithoutFields(t *testing.T) { var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": "p,q,r,s", "secret_id_num_uses": 10, @@ -1005,14 +1295,22 @@ func TestAppRole_RoleSecretIDWithoutFields(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleSecretIDReq := &logical.Request{ Operation: logical.UpdateOperation, Path: "role/role1/secret-id", Storage: storage, } - resp = b.requestNoErr(t, roleSecretIDReq) + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) if resp.Data["secret_id"].(string) == "" { t.Fatalf("failed to generate secret_id") @@ -1029,7 +1327,11 @@ func TestAppRole_RoleSecretIDWithoutFields(t *testing.T) { "secret_id": "abcd123", } roleSecretIDReq.Data = roleCustomSecretIDData - resp = b.requestNoErr(t, roleSecretIDReq) + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 20, logical.UpdateOperation), resp, true) if resp.Data["secret_id"] != "abcd123" { t.Fatalf("failed to set 
specific secret_id to role") @@ -1049,8 +1351,11 @@ func TestAppRole_RoleSecretIDWithValidFields(t *testing.T) { } var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": "p,q,r,s", "secret_id_num_uses": 0, @@ -1065,7 +1370,11 @@ func TestAppRole_RoleSecretIDWithValidFields(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) testCases := []testCase{ { @@ -1096,7 +1405,11 @@ func TestAppRole_RoleSecretIDWithValidFields(t *testing.T) { roleCustomSecretIDData := tc.payload roleSecretIDReq.Data = roleCustomSecretIDData - resp = b.requestNoErr(t, roleSecretIDReq) + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) if resp.Data["secret_id"].(string) == "" { t.Fatalf("failed to generate secret_id") @@ -1110,7 +1423,11 @@ func TestAppRole_RoleSecretIDWithValidFields(t *testing.T) { roleSecretIDReq.Path = "role/role1/custom-secret-id" roleSecretIDReq.Data = roleCustomSecretIDData - resp = b.requestNoErr(t, roleSecretIDReq) + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 20, logical.UpdateOperation), resp, true) if resp.Data["secret_id"] != tc.payload["secret_id"] { t.Fatalf("failed to set specific secret_id to role") @@ -1210,6 +1527,8 @@ func TestAppRole_ErrorsRoleSecretIDWithInvalidFields(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + for i, rc := range roleTestCases { roleData := map[string]interface{}{ "policies": "p,q,r,s", @@ -1226,7 +1545,11 @@ func TestAppRole_ErrorsRoleSecretIDWithInvalidFields(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) for _, tc := range rc.cases { t.Run(fmt.Sprintf("%s/%s", rc.name, tc.name), func(t *testing.T) { @@ -1262,6 +1585,8 @@ func TestAppRole_RoleCRUD(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": "p,q,r,s", "secret_id_num_uses": 10, @@ -1278,10 +1603,18 @@ func TestAppRole_RoleCRUD(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + 
schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected := map[string]interface{}{ "bind_secret_id": true, @@ -1324,10 +1657,18 @@ func TestAppRole_RoleCRUD(t *testing.T) { roleReq.Data = roleData roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected = map[string]interface{}{ "policies": []string{"a", "b", "c", "d"}, @@ -1353,7 +1694,11 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RU for role_id field roleReq.Path = "role/role1/role-id" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.ReadOperation), resp, true) if resp.Data["role_id"].(string) != "test_role_id" { t.Fatalf("bad: role_id: expected:test_role_id actual:%s\n", resp.Data["role_id"].(string)) @@ -1361,10 +1706,18 @@ func TestAppRole_RoleCRUD(t *testing.T) { roleReq.Data = map[string]interface{}{"role_id": "custom_role_id"} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 14, logical.ReadOperation), resp, true) if resp.Data["role_id"].(string) != "custom_role_id" { t.Fatalf("bad: role_id: expected:custom_role_id actual:%s\n", resp.Data["role_id"].(string)) @@ -1373,23 +1726,43 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for bind_secret_id field roleReq.Path = "role/role1/bind-secret-id" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 7, logical.ReadOperation), resp, true) roleReq.Data = map[string]interface{}{"bind_secret_id": false} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 7, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, 
roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 7, logical.ReadOperation), resp, true) if resp.Data["bind_secret_id"].(bool) { t.Fatalf("bad: bind_secret_id: expected:false actual:%t\n", resp.Data["bind_secret_id"].(bool)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 7, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 7, logical.ReadOperation), resp, true) if !resp.Data["bind_secret_id"].(bool) { t.Fatalf("expected the default value of 'true' to be set") @@ -1398,14 +1771,26 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for policies field roleReq.Path = "role/role1/policies" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 3, logical.ReadOperation), resp, true) roleReq.Data = map[string]interface{}{"policies": "a1,b1,c1,d1"} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 3, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 3, logical.ReadOperation), resp, true) if !reflect.DeepEqual(resp.Data["policies"].([]string), []string{"a1", "b1", "c1", "d1"}) { t.Fatalf("bad: policies: actual:%s\n", resp.Data["policies"].([]string)) @@ -1414,10 +1799,18 @@ func TestAppRole_RoleCRUD(t *testing.T) { t.Fatalf("bad: policies: actual:%s\n", resp.Data["policies"].([]string)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 3, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 3, logical.ReadOperation), resp, true) expectedPolicies := []string{} actualPolicies := resp.Data["token_policies"].([]string) @@ -1428,23 +1821,43 @@ func TestAppRole_RoleCRUD(t 
*testing.T) { // RUD for secret-id-num-uses field roleReq.Path = "role/role1/secret-id-num-uses" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 8, logical.ReadOperation), resp, true) roleReq.Data = map[string]interface{}{"secret_id_num_uses": 200} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 8, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 8, logical.ReadOperation), resp, true) if resp.Data["secret_id_num_uses"].(int) != 200 { t.Fatalf("bad: secret_id_num_uses: expected:200 actual:%d\n", resp.Data["secret_id_num_uses"].(int)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 8, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 8, logical.ReadOperation), resp, true) if resp.Data["secret_id_num_uses"].(int) != 0 { t.Fatalf("expected value to be reset") @@ -1453,23 +1866,43 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for secret_id_ttl field roleReq.Path = "role/role1/secret-id-ttl" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 9, logical.ReadOperation), resp, true) roleReq.Data = map[string]interface{}{"secret_id_ttl": 3001} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 9, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 9, logical.ReadOperation), resp, true) if resp.Data["secret_id_ttl"].(time.Duration) != 3001 { t.Fatalf("bad: secret_id_ttl: expected:3001 actual:%d\n", resp.Data["secret_id_ttl"].(time.Duration)) } roleReq.Operation = logical.DeleteOperation - resp = 
b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 9, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 9, logical.ReadOperation), resp, true) if resp.Data["secret_id_ttl"].(time.Duration) != 0 { t.Fatalf("expected value to be reset") @@ -1478,7 +1911,11 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for secret-id-num-uses field roleReq.Path = "role/role1/token-num-uses" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 11, logical.ReadOperation), resp, true) if resp.Data["token_num_uses"].(int) != 600 { t.Fatalf("bad: token_num_uses: expected:600 actual:%d\n", resp.Data["token_num_uses"].(int)) @@ -1486,20 +1923,36 @@ func TestAppRole_RoleCRUD(t *testing.T) { roleReq.Data = map[string]interface{}{"token_num_uses": 60} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 11, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 11, logical.ReadOperation), resp, true) if resp.Data["token_num_uses"].(int) != 60 { t.Fatalf("bad: token_num_uses: expected:60 actual:%d\n", resp.Data["token_num_uses"].(int)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 11, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 11, logical.ReadOperation), resp, true) if resp.Data["token_num_uses"].(int) != 0 { t.Fatalf("expected value to be reset") @@ -1508,23 +1961,43 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for 'period' field roleReq.Path = "role/role1/period" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 10, logical.ReadOperation), resp, true) 
roleReq.Data = map[string]interface{}{"period": 9001} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 10, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 10, logical.ReadOperation), resp, true) if resp.Data["period"].(time.Duration) != 9001 { t.Fatalf("bad: period: expected:9001 actual:%d\n", resp.Data["9001"].(time.Duration)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 10, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 10, logical.ReadOperation), resp, true) if resp.Data["token_period"].(time.Duration) != 0 { t.Fatalf("expected value to be reset") @@ -1533,23 +2006,43 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for token_ttl field roleReq.Path = "role/role1/token-ttl" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 12, logical.ReadOperation), resp, true) roleReq.Data = map[string]interface{}{"token_ttl": 4001} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 12, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 12, logical.ReadOperation), resp, true) if resp.Data["token_ttl"].(time.Duration) != 4001 { t.Fatalf("bad: token_ttl: expected:4001 actual:%d\n", resp.Data["token_ttl"].(time.Duration)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 12, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && 
resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 12, logical.ReadOperation), resp, true) if resp.Data["token_ttl"].(time.Duration) != 0 { t.Fatalf("expected value to be reset") @@ -1558,23 +2051,43 @@ func TestAppRole_RoleCRUD(t *testing.T) { // RUD for token_max_ttl field roleReq.Path = "role/role1/token-max-ttl" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 13, logical.ReadOperation), resp, true) roleReq.Data = map[string]interface{}{"token_max_ttl": 5001} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 13, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 13, logical.ReadOperation), resp, true) if resp.Data["token_max_ttl"].(time.Duration) != 5001 { t.Fatalf("bad: token_max_ttl: expected:5001 actual:%d\n", resp.Data["token_max_ttl"].(time.Duration)) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 13, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 13, logical.ReadOperation), resp, true) if resp.Data["token_max_ttl"].(time.Duration) != 0 { t.Fatalf("expected value to be reset") @@ -1583,7 +2096,11 @@ func TestAppRole_RoleCRUD(t *testing.T) { // Delete test for role roleReq.Path = "role/role1" roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation resp, err = b.HandleRequest(context.Background(), roleReq) @@ -1601,6 +2118,8 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": "p,q,r,s", "secret_id_num_uses": 10, @@ -1618,10 +2137,18 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + 
schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected := map[string]interface{}{ "bind_secret_id": true, @@ -1664,10 +2191,18 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { roleReq.Data = roleData roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected = map[string]interface{}{ "policies": []string{"a", "b", "c", "d"}, @@ -1693,7 +2228,11 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { // RUD for secret-id-bound-cidrs field roleReq.Path = "role/role1/secret-id-bound-cidrs" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 5, logical.ReadOperation), resp, true) if resp.Data["secret_id_bound_cidrs"].([]string)[0] != "127.0.0.1/32" || resp.Data["secret_id_bound_cidrs"].([]string)[1] != "127.0.0.1/16" { @@ -1702,20 +2241,36 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { roleReq.Data = map[string]interface{}{"secret_id_bound_cidrs": []string{"127.0.0.1/20"}} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 5, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 5, logical.ReadOperation), resp, true) if resp.Data["secret_id_bound_cidrs"].([]string)[0] != "127.0.0.1/20" { t.Fatalf("bad: secret_id_bound_cidrs: expected:127.0.0.1/20 actual:%s\n", resp.Data["secret_id_bound_cidrs"].([]string)[0]) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 5, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = 
b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 5, logical.ReadOperation), resp, true) if len(resp.Data["secret_id_bound_cidrs"].([]string)) != 0 { t.Fatalf("expected value to be reset") @@ -1724,7 +2279,11 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { // RUD for token-bound-cidrs field roleReq.Path = "role/role1/token-bound-cidrs" roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 6, logical.ReadOperation), resp, true) if resp.Data["token_bound_cidrs"].([]*sockaddr.SockAddrMarshaler)[0].String() != "127.0.0.1" || resp.Data["token_bound_cidrs"].([]*sockaddr.SockAddrMarshaler)[1].String() != "127.0.0.1/16" { @@ -1737,20 +2296,36 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { roleReq.Data = map[string]interface{}{"token_bound_cidrs": []string{"127.0.0.1/20"}} roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 6, logical.UpdateOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 6, logical.ReadOperation), resp, true) if resp.Data["token_bound_cidrs"].([]*sockaddr.SockAddrMarshaler)[0].String() != "127.0.0.1/20" { t.Fatalf("bad: token_bound_cidrs: expected:127.0.0.1/20 actual:%s\n", resp.Data["token_bound_cidrs"].([]*sockaddr.SockAddrMarshaler)[0]) } roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 6, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 6, logical.ReadOperation), resp, true) if len(resp.Data["token_bound_cidrs"].([]*sockaddr.SockAddrMarshaler)) != 0 { t.Fatalf("expected value to be reset") @@ -1759,13 +2334,18 @@ func TestAppRole_RoleWithTokenBoundCIDRsCRUD(t *testing.T) { // Delete test for role roleReq.Path = "role/role1" roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation resp, err = b.HandleRequest(context.Background(), roleReq) if err != nil || (resp != nil 
&& resp.IsError()) { t.Fatalf("err:%v resp:%#v", err, resp) } + if resp != nil { t.Fatalf("expected a nil response") } @@ -1776,6 +2356,8 @@ func TestAppRole_RoleWithTokenTypeCRUD(t *testing.T) { var err error b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + roleData := map[string]interface{}{ "policies": "p,q,r,s", "secret_id_num_uses": 10, @@ -1792,14 +2374,22 @@ func TestAppRole_RoleWithTokenTypeCRUD(t *testing.T) { Data: roleData, } - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) if 0 == len(resp.Warnings) { t.Fatalf("bad:\nexpected warning in resp:%#v\n", resp.Warnings) } roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected := map[string]interface{}{ "bind_secret_id": true, @@ -1841,14 +2431,22 @@ func TestAppRole_RoleWithTokenTypeCRUD(t *testing.T) { roleReq.Data = roleData roleReq.Operation = logical.UpdateOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), resp, true) if 0 == len(resp.Warnings) { t.Fatalf("bad:\nexpected a warning in resp:%#v\n", resp.Warnings) } roleReq.Operation = logical.ReadOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.ReadOperation), resp, true) expected = map[string]interface{}{ "policies": []string{"a", "b", "c", "d"}, @@ -1875,13 +2473,18 @@ func TestAppRole_RoleWithTokenTypeCRUD(t *testing.T) { // Delete test for role roleReq.Path = "role/role1" roleReq.Operation = logical.DeleteOperation - resp = b.requestNoErr(t, roleReq) + resp, err = b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.DeleteOperation), resp, true) roleReq.Operation = logical.ReadOperation resp, err = b.HandleRequest(context.Background(), roleReq) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("err:%v resp:%#v", err, resp) } + if resp != nil { t.Fatalf("expected a nil response") } @@ -1901,7 +2504,14 @@ func createRole(t *testing.T, b *backend, s logical.Storage, roleName, policies Storage: s, Data: roleData, } - _ = b.requestNoErr(t, roleReq) + + paths := rolePaths(b) + + resp, err := b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) } // TestAppRole_TokenutilUpgrade ensures that when we read values out that are @@ -2023,6 +2633,8 @@ func TestAppRole_SecretID_WithTTL(t 
*testing.T) { b, storage := createBackendWithStorage(t) + paths := rolePaths(b) + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // Create role @@ -2037,7 +2649,11 @@ func TestAppRole_SecretID_WithTTL(t *testing.T) { Storage: storage, Data: roleData, } - resp := b.requestNoErr(t, roleReq) + resp, err := b.HandleRequest(context.Background(), roleReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 0, logical.CreateOperation), resp, true) // Generate secret ID secretIDReq := &logical.Request{ @@ -2045,7 +2661,11 @@ func TestAppRole_SecretID_WithTTL(t *testing.T) { Path: "role/" + tt.roleName + "/secret-id", Storage: storage, } - resp = b.requestNoErr(t, secretIDReq) + resp, err = b.HandleRequest(context.Background(), secretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } + schema.ValidateResponse(t, schema.FindResponseSchema(t, paths, 15, logical.UpdateOperation), resp, true) // Extract the "ttl" value from the response data if it exists ttlRaw, okTTL := resp.Data["secret_id_ttl"] @@ -2059,7 +2679,7 @@ func TestAppRole_SecretID_WithTTL(t *testing.T) { ) respTTL, ok = ttlRaw.(int64) if !ok { - t.Fatalf("expected ttl to be an integer, got: %T", ttlRaw) + t.Fatalf("expected ttl to be an integer, got: %s", err) } // Verify secret ID response for different cases diff --git a/builtin/credential/approle/path_tidy_user_id.go b/builtin/credential/approle/path_tidy_user_id.go index b6c777b1496149..3f031267928ca6 100644 --- a/builtin/credential/approle/path_tidy_user_id.go +++ b/builtin/credential/approle/path_tidy_user_id.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -20,12 +17,6 @@ func pathTidySecretID(b *backend) *framework.Path { return &framework.Path{ Pattern: "tidy/secret-id$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAppRole, - OperationSuffix: "secret-id", - OperationVerb: "tidy", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathTidySecretIDUpdate, diff --git a/builtin/credential/approle/path_tidy_user_id_test.go b/builtin/credential/approle/path_tidy_user_id_test.go index c03686e89cbf90..c0590e1edec6ef 100644 --- a/builtin/credential/approle/path_tidy_user_id_test.go +++ b/builtin/credential/approle/path_tidy_user_id_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -11,13 +8,18 @@ import ( "testing" "time" + "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" "github.com/hashicorp/vault/sdk/logical" ) func TestAppRole_TidyDanglingAccessors_Normal(t *testing.T) { + var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := []*framework.Path{pathTidySecretID(b)} + // Create a role createRole(t, b, storage, "role1", "a,b,c") @@ -27,7 +29,10 @@ func TestAppRole_TidyDanglingAccessors_Normal(t *testing.T) { Path: "role/role1/secret-id", Storage: storage, } - _ = b.requestNoErr(t, roleSecretIDReq) + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } accessorHashes, err := storage.List(context.Background(), "accessor/") if err != nil { @@ -80,7 +85,7 @@ func TestAppRole_TidyDanglingAccessors_Normal(t *testing.T) { } schema.ValidateResponse( t, - schema.GetResponseSchema(t, pathTidySecretID(b), logical.UpdateOperation), + schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), secret, true, ) @@ -98,8 +103,12 @@ func TestAppRole_TidyDanglingAccessors_Normal(t *testing.T) { } func TestAppRole_TidyDanglingAccessors_RaceTest(t *testing.T) { + var resp *logical.Response + var err error b, storage := createBackendWithStorage(t) + paths := []*framework.Path{pathTidySecretID(b)} + // Create a role createRole(t, b, storage, "role1", "a,b,c") @@ -109,8 +118,10 @@ func TestAppRole_TidyDanglingAccessors_RaceTest(t *testing.T) { Path: "role/role1/secret-id", Storage: storage, } - _ = b.requestNoErr(t, roleSecretIDReq) - + resp, err = b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } count := 1 wg := &sync.WaitGroup{} @@ -125,7 +136,7 @@ func TestAppRole_TidyDanglingAccessors_RaceTest(t *testing.T) { } schema.ValidateResponse( t, - schema.GetResponseSchema(t, pathTidySecretID(b), logical.UpdateOperation), + schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), secret, true, ) @@ -138,7 +149,10 @@ func TestAppRole_TidyDanglingAccessors_RaceTest(t *testing.T) { Path: "role/role1/secret-id", Storage: storage, } - _ = b.requestNoErr(t, roleSecretIDReq) + resp, err := b.HandleRequest(context.Background(), roleSecretIDReq) + if err != nil || (resp != nil && resp.IsError()) { + t.Fatalf("err:%v resp:%#v", err, resp) + } }() entry, err := logical.StorageEntryJSON( @@ -179,7 +193,7 @@ func TestAppRole_TidyDanglingAccessors_RaceTest(t *testing.T) { } schema.ValidateResponse( t, - schema.GetResponseSchema(t, pathTidySecretID(b), logical.UpdateOperation), + schema.FindResponseSchema(t, paths, 0, logical.UpdateOperation), secret, true, ) diff --git a/builtin/credential/approle/validation.go b/builtin/credential/approle/validation.go index 70f2194aa6783a..9b3f87827013aa 100644 --- a/builtin/credential/approle/validation.go +++ b/builtin/credential/approle/validation.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( diff --git a/builtin/credential/approle/validation_test.go b/builtin/credential/approle/validation_test.go index 7f7366b6793772..ff325f4b1be959 100644 --- a/builtin/credential/approle/validation_test.go +++ b/builtin/credential/approle/validation_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package approle import ( diff --git a/builtin/credential/aws/backend.go b/builtin/credential/aws/backend.go index e8424f2c4956ce..543608968396f3 100644 --- a/builtin/credential/aws/backend.go +++ b/builtin/credential/aws/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -20,10 +17,7 @@ import ( cache "github.com/patrickmn/go-cache" ) -const ( - amzHeaderPrefix = "X-Amz-" - operationPrefixAWS = "aws" -) +const amzHeaderPrefix = "X-Amz-" var defaultAllowedSTSRequestHeaders = []string{ "X-Amz-Algorithm", @@ -129,9 +123,7 @@ func Backend(_ *logical.BackendConfig) (*backend, error) { deprecatedTerms: strings.NewReplacer( "accesslist", "whitelist", - "access-list", "whitelist", "denylist", "blacklist", - "deny-list", "blacklist", ), } @@ -317,7 +309,7 @@ func (b *backend) resolveArnToRealUniqueId(ctx context.Context, s logical.Storag switch entity.Type { case "user": - userInfo, err := iamClient.GetUserWithContext(ctx, &iam.GetUserInput{UserName: &entity.FriendlyName}) + userInfo, err := iamClient.GetUser(&iam.GetUserInput{UserName: &entity.FriendlyName}) if err != nil { return "", awsutil.AppendAWSError(err) } @@ -326,7 +318,7 @@ func (b *backend) resolveArnToRealUniqueId(ctx context.Context, s logical.Storag } return *userInfo.User.UserId, nil case "role": - roleInfo, err := iamClient.GetRoleWithContext(ctx, &iam.GetRoleInput{RoleName: &entity.FriendlyName}) + roleInfo, err := iamClient.GetRole(&iam.GetRoleInput{RoleName: &entity.FriendlyName}) if err != nil { return "", awsutil.AppendAWSError(err) } @@ -335,7 +327,7 @@ func (b *backend) resolveArnToRealUniqueId(ctx context.Context, s logical.Storag } return *roleInfo.Role.RoleId, nil case "instance-profile": - profileInfo, err := iamClient.GetInstanceProfileWithContext(ctx, &iam.GetInstanceProfileInput{InstanceProfileName: &entity.FriendlyName}) + profileInfo, err := iamClient.GetInstanceProfile(&iam.GetInstanceProfileInput{InstanceProfileName: &entity.FriendlyName}) if err != nil { return "", awsutil.AppendAWSError(err) } @@ -348,33 +340,13 @@ func (b *backend) resolveArnToRealUniqueId(ctx context.Context, s logical.Storag } } -// genDeprecatedPath will return a deprecated version of a framework.Path. The -// path pattern and display attributes (if any) will contain deprecated terms, -// and the path will be marked as deprecated. +// genDeprecatedPath will return a deprecated version of a framework.Path. The will include +// using deprecated terms in the path pattern, and marking the path as deprecated. 
func (b *backend) genDeprecatedPath(path *framework.Path) *framework.Path { pathDeprecated := *path pathDeprecated.Pattern = b.deprecatedTerms.Replace(path.Pattern) pathDeprecated.Deprecated = true - if path.DisplayAttrs != nil { - deprecatedDisplayAttrs := *path.DisplayAttrs - deprecatedDisplayAttrs.OperationPrefix = b.deprecatedTerms.Replace(path.DisplayAttrs.OperationPrefix) - deprecatedDisplayAttrs.OperationVerb = b.deprecatedTerms.Replace(path.DisplayAttrs.OperationVerb) - deprecatedDisplayAttrs.OperationSuffix = b.deprecatedTerms.Replace(path.DisplayAttrs.OperationSuffix) - pathDeprecated.DisplayAttrs = &deprecatedDisplayAttrs - } - - for i, op := range path.Operations { - if op.Properties().DisplayAttrs != nil { - deprecatedDisplayAttrs := *op.Properties().DisplayAttrs - deprecatedDisplayAttrs.OperationPrefix = b.deprecatedTerms.Replace(op.Properties().DisplayAttrs.OperationPrefix) - deprecatedDisplayAttrs.OperationVerb = b.deprecatedTerms.Replace(op.Properties().DisplayAttrs.OperationVerb) - deprecatedDisplayAttrs.OperationSuffix = b.deprecatedTerms.Replace(op.Properties().DisplayAttrs.OperationSuffix) - deprecatedProperties := pathDeprecated.Operations[i].(*framework.PathOperation) - deprecatedProperties.DisplayAttrs = &deprecatedDisplayAttrs - } - } - return &pathDeprecated } diff --git a/builtin/credential/aws/backend_e2e_test.go b/builtin/credential/aws/backend_e2e_test.go index e8939b9d67afd8..ac2bb22f129ae0 100644 --- a/builtin/credential/aws/backend_e2e_test.go +++ b/builtin/credential/aws/backend_e2e_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/backend_test.go b/builtin/credential/aws/backend_test.go index dea280c00262d2..5b435d3e3a5ce0 100644 --- a/builtin/credential/aws/backend_test.go +++ b/builtin/credential/aws/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -1524,7 +1521,6 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { return } - ctx := context.Background() storage := &logical.InmemStorage{} config := logical.TestBackendConfig() config.StorageView = storage @@ -1603,7 +1599,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: clientConfigData, } - _, err = b.HandleRequest(ctx, clientRequest) + _, err = b.HandleRequest(context.Background(), clientRequest) if err != nil { t.Fatal(err) } @@ -1617,7 +1613,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: configIdentityData, } - resp, err := b.HandleRequest(ctx, configIdentityRequest) + resp, err := b.HandleRequest(context.Background(), configIdentityRequest) if err != nil { t.Fatal(err) } @@ -1637,7 +1633,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: roleData, } - resp, err = b.HandleRequest(ctx, roleRequest) + resp, err = b.HandleRequest(context.Background(), roleRequest) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: failed to create role: resp:%#v\nerr:%v", resp, err) } @@ -1654,7 +1650,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: roleDataEc2, } - resp, err = b.HandleRequest(ctx, roleRequestEc2) + resp, err = b.HandleRequest(context.Background(), roleRequestEc2) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: failed to create role; resp:%#v\nerr:%v", resp, err) } @@ -1692,7 +1688,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: loginData, } - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil || resp == nil || !resp.IsError() { t.Errorf("bad: expected failed login due to missing header: resp:%#v\nerr:%v", resp, err) } @@ -1715,7 +1711,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: loginData, } - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil || resp == nil || !resp.IsError() { t.Errorf("bad: expected failed login due to invalid header: resp:%#v\nerr:%v", resp, err) } @@ -1734,13 +1730,13 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Storage: storage, Data: loginData, } - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil || resp == nil || !resp.IsError() { t.Errorf("bad: expected failed login due to invalid role: resp:%#v\nerr:%v", resp, err) } loginData["role"] = "ec2only" - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil || resp == nil || !resp.IsError() { t.Errorf("bad: expected failed login due to bad auth type: resp:%#v\nerr:%v", resp, err) } @@ -1748,7 +1744,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { // finally, the happy path test :) loginData["role"] = testValidRoleName - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil { t.Fatal(err) } @@ -1771,7 +1767,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { Schema: b.pathLogin().Fields, } // ensure we can renew - resp, err = b.pathLoginRenew(ctx, renewReq, emptyLoginFd) + resp, err = b.pathLoginRenew(context.Background(), renewReq, emptyLoginFd) if 
err != nil { t.Fatal(err) } @@ -1789,17 +1785,17 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { // pick up the fake user ID roleData["bound_iam_principal_arn"] = entity.canonicalArn() roleRequest.Path = "role/" + testValidRoleName - resp, err = b.HandleRequest(ctx, roleRequest) + resp, err = b.HandleRequest(context.Background(), roleRequest) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: failed to recreate role: resp:%#v\nerr:%v", resp, err) } - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil || resp == nil || !resp.IsError() { t.Errorf("bad: expected failed login due to changed AWS role ID: resp: %#v\nerr:%v", resp, err) } // and ensure a renew no longer works - resp, err = b.pathLoginRenew(ctx, renewReq, emptyLoginFd) + resp, err = b.pathLoginRenew(context.Background(), renewReq, emptyLoginFd) if err == nil || (resp != nil && !resp.IsError()) { t.Errorf("bad: expected failed renew due to changed AWS role ID: resp: %#v", resp) } @@ -1812,13 +1808,13 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { wildcardEntity.FriendlyName = "*" roleData["bound_iam_principal_arn"] = []string{wildcardEntity.canonicalArn(), "arn:aws:iam::123456789012:role/DoesNotExist/Vault_Fake_Role*"} roleRequest.Path = "role/" + wildcardRoleName - resp, err = b.HandleRequest(ctx, roleRequest) + resp, err = b.HandleRequest(context.Background(), roleRequest) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: failed to create wildcard roles: resp:%#v\nerr:%v", resp, err) } loginData["role"] = wildcardRoleName - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil { t.Fatal(err) } @@ -1827,7 +1823,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { } // and ensure we can renew renewReq = generateRenewRequest(storage, resp.Auth) - resp, err = b.pathLoginRenew(ctx, renewReq, emptyLoginFd) + resp, err = b.pathLoginRenew(context.Background(), renewReq, emptyLoginFd) if err != nil { t.Fatal(err) } @@ -1838,17 +1834,7 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { t.Fatalf("got error when renewing: %#v", *resp) } // ensure the cache is populated - - clientUserIDRaw, ok := resp.Auth.InternalData["client_user_id"] - if !ok { - t.Errorf("client_user_id not found in response") - } - clientUserID, ok := clientUserIDRaw.(string) - if !ok { - t.Errorf("client_user_id is not a string: %#v", clientUserIDRaw) - } - - cachedArn := b.getCachedUserId(clientUserID) + cachedArn := b.getCachedUserId(resp.Auth.Metadata["client_user_id"]) if cachedArn == "" { t.Errorf("got empty ARN back from user ID cache; expected full arn") } @@ -1857,13 +1843,13 @@ func TestBackendAcc_LoginWithCallerIdentity(t *testing.T) { period := 600 * time.Second roleData["period"] = period.String() roleRequest.Path = "role/" + testValidRoleName - resp, err = b.HandleRequest(ctx, roleRequest) + resp, err = b.HandleRequest(context.Background(), roleRequest) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: failed to create wildcard role: resp:%#v\nerr:%v", resp, err) } loginData["role"] = testValidRoleName - resp, err = b.HandleRequest(ctx, loginRequest) + resp, err = b.HandleRequest(context.Background(), loginRequest) if err != nil { t.Fatal(err) } diff --git a/builtin/credential/aws/certificates.go b/builtin/credential/aws/certificates.go index 4b97a952b6fac6..c745ad2b3f2437 100644 --- 
a/builtin/credential/aws/certificates.go +++ b/builtin/credential/aws/certificates.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/cli.go b/builtin/credential/aws/cli.go index a1695574f3b697..7b063fa5f42a08 100644 --- a/builtin/credential/aws/cli.go +++ b/builtin/credential/aws/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/client.go b/builtin/credential/aws/client.go index 314c97ec395aa7..ff8ff5c837f9b9 100644 --- a/builtin/credential/aws/client.go +++ b/builtin/credential/aws/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -122,7 +119,7 @@ func (b *backend) getClientConfig(ctx context.Context, s logical.Storage, region return nil, fmt.Errorf("could not obtain sts client: %w", err) } inputParams := &sts.GetCallerIdentityInput{} - identity, err := client.GetCallerIdentityWithContext(ctx, inputParams) + identity, err := client.GetCallerIdentity(inputParams) if err != nil { return nil, fmt.Errorf("unable to fetch current caller: %w", err) } diff --git a/builtin/credential/aws/cmd/aws/main.go b/builtin/credential/aws/cmd/aws/main.go index c7fce3e33fab9f..6de96d02d1964b 100644 --- a/builtin/credential/aws/cmd/aws/main.go +++ b/builtin/credential/aws/cmd/aws/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: awsauth.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/aws/path_config_certificate.go b/builtin/credential/aws/path_config_certificate.go index 36dfe3c213932c..f734694781b351 100644 --- a/builtin/credential/aws/path_config_certificate.go +++ b/builtin/credential/aws/path_config_certificate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -21,11 +18,6 @@ func (b *backend) pathListCertificates() *framework.Path { return &framework.Path{ Pattern: "config/certificates/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "certificate-configurations", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathCertificatesList, @@ -40,11 +32,6 @@ func (b *backend) pathListCertificates() *framework.Path { func (b *backend) pathConfigCertificate() *framework.Path { return &framework.Path{ Pattern: "config/certificate/" + framework.GenericNameRegex("cert_name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "cert_name": { Type: framework.TypeString, @@ -71,29 +58,15 @@ vary. 
Defaults to "pkcs7".`, Operations: map[logical.Operation]framework.OperationHandler{ logical.CreateOperation: &framework.PathOperation{ Callback: b.pathConfigCertificateCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "certificate", - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathConfigCertificateCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "certificate", - }, }, logical.ReadOperation: &framework.PathOperation{ Callback: b.pathConfigCertificateRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "certificate-configuration", - }, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathConfigCertificateDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "certificate-configuration", - }, }, }, diff --git a/builtin/credential/aws/path_config_client.go b/builtin/credential/aws/path_config_client.go index 979fac11a9d8c2..c609e1acd608aa 100644 --- a/builtin/credential/aws/path_config_client.go +++ b/builtin/credential/aws/path_config_client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -19,11 +16,6 @@ import ( func (b *backend) pathConfigClient() *framework.Path { return &framework.Path{ Pattern: "config/client$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "access_key": { Type: framework.TypeString, @@ -85,29 +77,15 @@ func (b *backend) pathConfigClient() *framework.Path { Operations: map[logical.Operation]framework.OperationHandler{ logical.CreateOperation: &framework.PathOperation{ Callback: b.pathConfigClientCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "client", - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathConfigClientCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "client", - }, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathConfigClientDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "client-configuration", - }, }, logical.ReadOperation: &framework.PathOperation{ Callback: b.pathConfigClientRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "client-configuration", - }, }, }, diff --git a/builtin/credential/aws/path_config_client_test.go b/builtin/credential/aws/path_config_client_test.go index 4c807d1b40f8c9..493d20d9df0009 100644 --- a/builtin/credential/aws/path_config_client_test.go +++ b/builtin/credential/aws/path_config_client_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/path_config_identity.go b/builtin/credential/aws/path_config_identity.go index 2512c9db39fd23..282d277fab5471 100644 --- a/builtin/credential/aws/path_config_identity.go +++ b/builtin/credential/aws/path_config_identity.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -57,11 +54,6 @@ var ( func (b *backend) pathConfigIdentity() *framework.Path { return &framework.Path{ Pattern: "config/identity$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "iam_alias": { Type: framework.TypeString, @@ -80,16 +72,9 @@ func (b *backend) pathConfigIdentity() *framework.Path { Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: pathConfigIdentityRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "identity-integration-configuration", - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: pathConfigIdentityUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "identity-integration", - }, }, }, diff --git a/builtin/credential/aws/path_config_identity_test.go b/builtin/credential/aws/path_config_identity_test.go index 8a7db09f2bbdc5..19e919fb1179d3 100644 --- a/builtin/credential/aws/path_config_identity_test.go +++ b/builtin/credential/aws/path_config_identity_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/path_config_rotate_root.go b/builtin/credential/aws/path_config_rotate_root.go index 141b7e2fbea57b..125056234312bd 100644 --- a/builtin/credential/aws/path_config_rotate_root.go +++ b/builtin/credential/aws/path_config_rotate_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -24,12 +21,6 @@ func (b *backend) pathConfigRotateRoot() *framework.Path { return &framework.Path{ Pattern: "config/rotate-root", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationVerb: "rotate", - OperationSuffix: "root-credentials", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathConfigRotateRootUpdate, @@ -106,7 +97,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R // Get the current user's name since it's required to create an access key. // Empty input means get the current user. var getUserInput iam.GetUserInput - getUserRes, err := iamClient.GetUserWithContext(ctx, &getUserInput) + getUserRes, err := iamClient.GetUser(&getUserInput) if err != nil { return nil, fmt.Errorf("error calling GetUser: %w", err) } @@ -124,7 +115,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R createAccessKeyInput := iam.CreateAccessKeyInput{ UserName: getUserRes.User.UserName, } - createAccessKeyRes, err := iamClient.CreateAccessKeyWithContext(ctx, &createAccessKeyInput) + createAccessKeyRes, err := iamClient.CreateAccessKey(&createAccessKeyInput) if err != nil { return nil, fmt.Errorf("error calling CreateAccessKey: %w", err) } @@ -148,7 +139,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R AccessKeyId: createAccessKeyRes.AccessKey.AccessKeyId, UserName: getUserRes.User.UserName, } - if _, err := iamClient.DeleteAccessKeyWithContext(ctx, &deleteAccessKeyInput); err != nil { + if _, err := iamClient.DeleteAccessKey(&deleteAccessKeyInput); err != nil { // Include this error in the errs returned by this method. 
errs = multierror.Append(errs, fmt.Errorf("error deleting newly created but unstored access key ID %s: %s", *createAccessKeyRes.AccessKey.AccessKeyId, err)) } @@ -185,7 +176,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R AccessKeyId: aws.String(oldAccessKey), UserName: getUserRes.User.UserName, } - if _, err = iamClient.DeleteAccessKeyWithContext(ctx, &deleteAccessKeyInput); err != nil { + if _, err = iamClient.DeleteAccessKey(&deleteAccessKeyInput); err != nil { errs = multierror.Append(errs, fmt.Errorf("error deleting old access key ID %s: %w", oldAccessKey, err)) return nil, errs } diff --git a/builtin/credential/aws/path_config_rotate_root_test.go b/builtin/credential/aws/path_config_rotate_root_test.go index 3fe5b29c0479ce..940c6d10227092 100644 --- a/builtin/credential/aws/path_config_rotate_root_test.go +++ b/builtin/credential/aws/path_config_rotate_root_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -8,7 +5,6 @@ import ( "testing" "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/iam" "github.com/aws/aws-sdk-go/service/iam/iamiface" @@ -16,23 +12,9 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -type mockIAMClient awsutil.MockIAM - -func (m *mockIAMClient) GetUserWithContext(_ aws.Context, input *iam.GetUserInput, _ ...request.Option) (*iam.GetUserOutput, error) { - return (*awsutil.MockIAM)(m).GetUser(input) -} - -func (m *mockIAMClient) CreateAccessKeyWithContext(_ aws.Context, input *iam.CreateAccessKeyInput, _ ...request.Option) (*iam.CreateAccessKeyOutput, error) { - return (*awsutil.MockIAM)(m).CreateAccessKey(input) -} - -func (m *mockIAMClient) DeleteAccessKeyWithContext(_ aws.Context, input *iam.DeleteAccessKeyInput, _ ...request.Option) (*iam.DeleteAccessKeyOutput, error) { - return (*awsutil.MockIAM)(m).DeleteAccessKey(input) -} - func TestPathConfigRotateRoot(t *testing.T) { getIAMClient = func(sess *session.Session) iamiface.IAMAPI { - return &mockIAMClient{ + return &awsutil.MockIAM{ CreateAccessKeyOutput: &iam.CreateAccessKeyOutput{ AccessKey: &iam.AccessKey{ AccessKeyId: aws.String("fizz2"), diff --git a/builtin/credential/aws/path_config_sts.go b/builtin/credential/aws/path_config_sts.go index 50d986d20c84ee..3666a90041793c 100644 --- a/builtin/credential/aws/path_config_sts.go +++ b/builtin/credential/aws/path_config_sts.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
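// Illustrative sketch only, not part of the diff above: the two aws-sdk-go v1 call styles
// this backport toggles between. The *WithContext variants thread a context.Context
// (cancellation, deadlines) through the request; the plain variants do not. Client
// construction here is a bare-bones assumption.
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/iam"
)

func main() {
	sess, err := session.NewSession()
	if err != nil {
		log.Fatal(err)
	}
	client := iam.New(sess)

	// Context-aware form (what the pre-backport code used):
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	if out, err := client.GetUserWithContext(ctx, &iam.GetUserInput{}); err == nil {
		fmt.Println("caller:", *out.User.Arn)
	}

	// Plain form (what the backported code falls back to); empty input means
	// "the current user", as in the rotate-root handler above:
	if out, err := client.GetUser(&iam.GetUserInput{}); err == nil {
		fmt.Println("caller:", *out.User.Arn)
	}
}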
-// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -20,11 +17,6 @@ func (b *backend) pathListSts() *framework.Path { return &framework.Path{ Pattern: "config/sts/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "sts-role-relationships", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathStsList, @@ -39,12 +31,6 @@ func (b *backend) pathListSts() *framework.Path { func (b *backend) pathConfigSts() *framework.Path { return &framework.Path{ Pattern: "config/sts/" + framework.GenericNameRegex("account_id"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "sts-role", - }, - Fields: map[string]*framework.FieldSchema{ "account_id": { Type: framework.TypeString, diff --git a/builtin/credential/aws/path_config_tidy_identity_accesslist.go b/builtin/credential/aws/path_config_tidy_identity_accesslist.go index 686b0263c1aa9b..f89c5ab215974f 100644 --- a/builtin/credential/aws/path_config_tidy_identity_accesslist.go +++ b/builtin/credential/aws/path_config_tidy_identity_accesslist.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -18,11 +15,6 @@ const ( func (b *backend) pathConfigTidyIdentityAccessList() *framework.Path { return &framework.Path{ Pattern: fmt.Sprintf("%s$", "config/tidy/identity-accesslist"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "safety_buffer": { Type: framework.TypeDurationSecond, @@ -42,29 +34,15 @@ expiration, before it is removed from the backend storage.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.CreateOperation: &framework.PathOperation{ Callback: b.pathConfigTidyIdentityAccessListCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "identity-access-list-tidy-operation", - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathConfigTidyIdentityAccessListCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "identity-access-list-tidy-operation", - }, }, logical.ReadOperation: &framework.PathOperation{ Callback: b.pathConfigTidyIdentityAccessListRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "identity-access-list-tidy-settings", - }, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathConfigTidyIdentityAccessListDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "identity-access-list-tidy-settings", - }, }, }, diff --git a/builtin/credential/aws/path_config_tidy_roletag_denylist.go b/builtin/credential/aws/path_config_tidy_roletag_denylist.go index fa82b77d25f2a9..e00404d7ec647d 100644 --- a/builtin/credential/aws/path_config_tidy_roletag_denylist.go +++ b/builtin/credential/aws/path_config_tidy_roletag_denylist.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -17,11 +14,6 @@ const ( func (b *backend) pathConfigTidyRoletagDenyList() *framework.Path { return &framework.Path{ Pattern: "config/tidy/roletag-denylist$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "safety_buffer": { Type: framework.TypeDurationSecond, @@ -43,29 +35,15 @@ Defaults to 4320h (180 days).`, Operations: map[logical.Operation]framework.OperationHandler{ logical.CreateOperation: &framework.PathOperation{ Callback: b.pathConfigTidyRoletagDenyListCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "role-tag-deny-list-tidy-operation", - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathConfigTidyRoletagDenyListCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "role-tag-deny-list-tidy-operation", - }, }, logical.ReadOperation: &framework.PathOperation{ Callback: b.pathConfigTidyRoletagDenyListRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "role-tag-deny-list-tidy-settings", - }, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathConfigTidyRoletagDenyListDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "role-tag-deny-list-tidy-settings", - }, }, }, diff --git a/builtin/credential/aws/path_identity_accesslist.go b/builtin/credential/aws/path_identity_accesslist.go index 77ec5749496dff..a622b7d8f962f3 100644 --- a/builtin/credential/aws/path_identity_accesslist.go +++ b/builtin/credential/aws/path_identity_accesslist.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -16,12 +13,6 @@ const identityAccessListStorage = "whitelist/identity/" func (b *backend) pathIdentityAccessList() *framework.Path { return &framework.Path{ Pattern: "identity-accesslist/" + framework.GenericNameRegex("instance_id"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "identity-access-list", - }, - Fields: map[string]*framework.FieldSchema{ "instance_id": { Type: framework.TypeString, @@ -48,11 +39,6 @@ func (b *backend) pathListIdentityAccessList() *framework.Path { return &framework.Path{ Pattern: "identity-accesslist/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "identity-access-list", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathAccessListIdentitiesList, diff --git a/builtin/credential/aws/path_login.go b/builtin/credential/aws/path_login.go index f4041b38db8397..fb8ab4f47492f8 100644 --- a/builtin/credential/aws/path_login.go +++ b/builtin/credential/aws/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -55,10 +52,6 @@ var ( func (b *backend) pathLogin() *framework.Path { return &framework.Path{ Pattern: "login$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationVerb: "login", - }, Fields: map[string]*framework.FieldSchema{ "role": { Type: framework.TypeString, @@ -110,8 +103,8 @@ This must match the request body included in the signature.`, "iam_request_headers": { Type: framework.TypeHeader, Description: `Key/value pairs of headers for use in the -sts:GetCallerIdentity HTTP requests headers when auth_type is iam. Can be either -a Base64-encoded, JSON-serialized string, or a JSON object of key/value pairs. +sts:GetCallerIdentity HTTP requests headers when auth_type is iam. Can be either +a Base64-encoded, JSON-serialized string, or a JSON object of key/value pairs. This must at a minimum include the headers over which AWS has included a signature.`, }, "identity": { @@ -344,7 +337,7 @@ func (b *backend) pathLoginResolveRoleIam(ctx context.Context, req *logical.Requ // instanceIamRoleARN fetches the IAM role ARN associated with the given // instance profile name -func (b *backend) instanceIamRoleARN(ctx context.Context, iamClient *iam.IAM, instanceProfileName string) (string, error) { +func (b *backend) instanceIamRoleARN(iamClient *iam.IAM, instanceProfileName string) (string, error) { if iamClient == nil { return "", fmt.Errorf("nil iamClient") } @@ -352,7 +345,7 @@ func (b *backend) instanceIamRoleARN(ctx context.Context, iamClient *iam.IAM, in return "", fmt.Errorf("missing instance profile name") } - profile, err := iamClient.GetInstanceProfileWithContext(ctx, &iam.GetInstanceProfileInput{ + profile, err := iamClient.GetInstanceProfile(&iam.GetInstanceProfileInput{ InstanceProfileName: aws.String(instanceProfileName), }) if err != nil { @@ -386,7 +379,7 @@ func (b *backend) validateInstance(ctx context.Context, s logical.Storage, insta return nil, err } - status, err := ec2Client.DescribeInstancesWithContext(ctx, &ec2.DescribeInstancesInput{ + status, err := ec2Client.DescribeInstances(&ec2.DescribeInstancesInput{ InstanceIds: []*string{ aws.String(instanceID), }, @@ -728,7 +721,7 @@ func (b *backend) verifyInstanceMeetsRoleRequirements(ctx context.Context, } else if iamClient == nil { return nil, fmt.Errorf("received a nil iamClient") } - iamRoleARN, err := b.instanceIamRoleARN(ctx, iamClient, iamInstanceProfileEntity.FriendlyName) + iamRoleARN, err := b.instanceIamRoleARN(iamClient, iamInstanceProfileEntity.FriendlyName) if err != nil { return nil, fmt.Errorf("IAM role ARN could not be fetched: %w", err) } @@ -1839,7 +1832,7 @@ func (b *backend) fullArn(ctx context.Context, e *iamEntity, s logical.Storage) input := iam.GetUserInput{ UserName: aws.String(e.FriendlyName), } - resp, err := client.GetUserWithContext(ctx, &input) + resp, err := client.GetUser(&input) if err != nil { return "", fmt.Errorf("error fetching user %q: %w", e.FriendlyName, err) } @@ -1853,7 +1846,7 @@ func (b *backend) fullArn(ctx context.Context, e *iamEntity, s logical.Storage) input := iam.GetRoleInput{ RoleName: aws.String(e.FriendlyName), } - resp, err := client.GetRoleWithContext(ctx, &input) + resp, err := client.GetRole(&input) if err != nil { return "", fmt.Errorf("error fetching role %q: %w", e.FriendlyName, err) } diff --git a/builtin/credential/aws/path_login_test.go b/builtin/credential/aws/path_login_test.go index 2c0262075ad381..6ffd60ed14943f 100644 --- 
a/builtin/credential/aws/path_login_test.go +++ b/builtin/credential/aws/path_login_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/path_role.go b/builtin/credential/aws/path_role.go index 1c9ecf27283276..12a4c7d0f2d9f4 100644 --- a/builtin/credential/aws/path_role.go +++ b/builtin/credential/aws/path_role.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -23,12 +20,6 @@ var currentRoleStorageVersion = 3 func (b *backend) pathRole() *framework.Path { p := &framework.Path{ Pattern: "role/" + framework.GenericNameRegex("role"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "auth-role", - }, - Fields: map[string]*framework.FieldSchema{ "role": { Type: framework.TypeString, @@ -87,9 +78,6 @@ auth_type is ec2 or inferred_entity_type is ec2_instance.`, given instance IDs. Can be a list or comma-separated string of EC2 instance IDs. This is only applicable when auth_type is ec2 or inferred_entity_type is ec2_instance.`, - DisplayAttrs: &framework.DisplayAttributes{ - Description: "If set, defines a constraint on the EC2 instances to have one of the given instance IDs. A list of EC2 instance IDs. This is only applicable when auth_type is ec2 or inferred_entity_type is ec2_instance.", - }, }, "resolve_aws_unique_ids": { Type: framework.TypeBool, @@ -211,11 +199,6 @@ func (b *backend) pathListRole() *framework.Path { return &framework.Path{ Pattern: "role/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "auth-roles", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathRoleList, @@ -231,11 +214,6 @@ func (b *backend) pathListRoles() *framework.Path { return &framework.Path{ Pattern: "roles/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "auth-roles2", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathRoleList, diff --git a/builtin/credential/aws/path_role_tag.go b/builtin/credential/aws/path_role_tag.go index 180b4105c69c88..15927a82a2bb61 100644 --- a/builtin/credential/aws/path_role_tag.go +++ b/builtin/credential/aws/path_role_tag.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -26,12 +23,6 @@ const roleTagVersion = "v1" func (b *backend) pathRoleTag() *framework.Path { return &framework.Path{ Pattern: "role/" + framework.GenericNameRegex("role") + "/tag$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "role-tag", - }, - Fields: map[string]*framework.FieldSchema{ "role": { Type: framework.TypeString, diff --git a/builtin/credential/aws/path_role_test.go b/builtin/credential/aws/path_role_test.go index 3a63d4cd35553d..b8e824a9d27c42 100644 --- a/builtin/credential/aws/path_role_test.go +++ b/builtin/credential/aws/path_role_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( diff --git a/builtin/credential/aws/path_roletag_denylist.go b/builtin/credential/aws/path_roletag_denylist.go index 82004363530bb5..19520aab2f59fe 100644 --- a/builtin/credential/aws/path_roletag_denylist.go +++ b/builtin/credential/aws/path_roletag_denylist.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -15,12 +12,6 @@ import ( func (b *backend) pathRoletagDenyList() *framework.Path { return &framework.Path{ Pattern: "roletag-denylist/(?P.*)", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "role-tag-deny-list", - }, - Fields: map[string]*framework.FieldSchema{ "role_tag": { Type: framework.TypeString, @@ -51,11 +42,6 @@ func (b *backend) pathListRoletagDenyList() *framework.Path { return &framework.Path{ Pattern: "roletag-denylist/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "role-tag-deny-lists", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathRoletagDenyListsList, diff --git a/builtin/credential/aws/path_tidy_identity_accesslist.go b/builtin/credential/aws/path_tidy_identity_accesslist.go index 3b907c43d3788f..9455cc0d3df1a3 100644 --- a/builtin/credential/aws/path_tidy_identity_accesslist.go +++ b/builtin/credential/aws/path_tidy_identity_accesslist.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -18,13 +15,6 @@ import ( func (b *backend) pathTidyIdentityAccessList() *framework.Path { return &framework.Path{ Pattern: "tidy/identity-accesslist$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "identity-access-list", - OperationVerb: "tidy", - }, - Fields: map[string]*framework.FieldSchema{ "safety_buffer": { Type: framework.TypeDurationSecond, diff --git a/builtin/credential/aws/path_tidy_roletag_denylist.go b/builtin/credential/aws/path_tidy_roletag_denylist.go index ddd1f7944d5731..80c9dd8afea723 100644 --- a/builtin/credential/aws/path_tidy_roletag_denylist.go +++ b/builtin/credential/aws/path_tidy_roletag_denylist.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package awsauth import ( @@ -22,13 +19,6 @@ const ( func (b *backend) pathTidyRoletagDenyList() *framework.Path { return &framework.Path{ Pattern: "tidy/roletag-denylist$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "role-tag-deny-list", - OperationVerb: "tidy", - }, - Fields: map[string]*framework.FieldSchema{ "safety_buffer": { Type: framework.TypeDurationSecond, diff --git a/builtin/credential/cert/backend.go b/builtin/credential/cert/backend.go index 61dd88988e361a..567ef8163a8dc6 100644 --- a/builtin/credential/cert/backend.go +++ b/builtin/credential/cert/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -21,8 +18,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const operationPrefixCert = "cert" - func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() if err := b.Setup(ctx, conf); err != nil { diff --git a/builtin/credential/cert/backend_test.go b/builtin/credential/cert/backend_test.go index 47d7ae05d54428..56d44ea4a2d014 100644 --- a/builtin/credential/cert/backend_test.go +++ b/builtin/credential/cert/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( diff --git a/builtin/credential/cert/cli.go b/builtin/credential/cert/cli.go index 3ba1e712d13443..4a470c89616f8c 100644 --- a/builtin/credential/cert/cli.go +++ b/builtin/credential/cert/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( diff --git a/builtin/credential/cert/cmd/cert/main.go b/builtin/credential/cert/cmd/cert/main.go index 5b80a54cde778b..09018ec3f040d4 100644 --- a/builtin/credential/cert/cmd/cert/main.go +++ b/builtin/credential/cert/cmd/cert/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: cert.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/cert/path_certs.go b/builtin/credential/cert/path_certs.go index 03a3e5586210e5..a0f84a9fe18d2d 100644 --- a/builtin/credential/cert/path_certs.go +++ b/builtin/credential/cert/path_certs.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -21,33 +18,22 @@ func pathListCerts(b *backend) *framework.Path { return &framework.Path{ Pattern: "certs/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixCert, - OperationSuffix: "certificates", - Navigation: true, - ItemType: "Certificate", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathCertList, }, HelpSynopsis: pathCertHelpSyn, HelpDescription: pathCertHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "Certificate", + }, } } func pathCerts(b *backend) *framework.Path { p := &framework.Path{ Pattern: "certs/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixCert, - OperationSuffix: "certificate", - Action: "Create", - ItemType: "Certificate", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -77,9 +63,6 @@ Must be x509 PEM encoded.`, Type: framework.TypeCommaStringSlice, Description: `A comma-separated list of OCSP server addresses. If unset, the OCSP server is determined from the AuthorityInformationAccess extension on the certificate being inspected.`, - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of OCSP server addresses. 
If unset, the OCSP server is determined from the AuthorityInformationAccess extension on the certificate being inspected.", - }, }, "ocsp_fail_open": { Type: framework.TypeBool, @@ -98,8 +81,7 @@ At least one must exist in either the Common Name or SANs. Supports globbing. This parameter is deprecated, please use allowed_common_names, allowed_dns_sans, allowed_email_sans, allowed_uri_sans.`, DisplayAttrs: &framework.DisplayAttributes{ - Group: "Constraints", - Description: "A list of names. At least one must exist in either the Common Name or SANs. Supports globbing. This parameter is deprecated, please use allowed_common_names, allowed_dns_sans, allowed_email_sans, allowed_uri_sans.", + Group: "Constraints", }, }, @@ -108,8 +90,7 @@ allowed_email_sans, allowed_uri_sans.`, Description: `A comma-separated list of names. At least one must exist in the Common Name. Supports globbing.`, DisplayAttrs: &framework.DisplayAttributes{ - Group: "Constraints", - Description: "A list of names. At least one must exist in the Common Name. Supports globbing.", + Group: "Constraints", }, }, @@ -118,9 +99,8 @@ At least one must exist in the Common Name. Supports globbing.`, Description: `A comma-separated list of DNS names. At least one must exist in the SANs. Supports globbing.`, DisplayAttrs: &framework.DisplayAttributes{ - Name: "Allowed DNS SANs", - Group: "Constraints", - Description: "A list of DNS names. At least one must exist in the SANs. Supports globbing.", + Name: "Allowed DNS SANs", + Group: "Constraints", }, }, @@ -129,9 +109,8 @@ At least one must exist in the SANs. Supports globbing.`, Description: `A comma-separated list of Email Addresses. At least one must exist in the SANs. Supports globbing.`, DisplayAttrs: &framework.DisplayAttributes{ - Name: "Allowed Email SANs", - Group: "Constraints", - Description: "A list of Email Addresses. At least one must exist in the SANs. Supports globbing.", + Name: "Allowed Email SANs", + Group: "Constraints", }, }, @@ -140,9 +119,8 @@ At least one must exist in the SANs. Supports globbing.`, Description: `A comma-separated list of URIs. At least one must exist in the SANs. Supports globbing.`, DisplayAttrs: &framework.DisplayAttributes{ - Name: "Allowed URI SANs", - Group: "Constraints", - Description: "A list of URIs. At least one must exist in the SANs. Supports globbing.", + Name: "Allowed URI SANs", + Group: "Constraints", }, }, @@ -151,8 +129,7 @@ At least one must exist in the SANs. Supports globbing.`, Description: `A comma-separated list of Organizational Units names. At least one must exist in the OU field.`, DisplayAttrs: &framework.DisplayAttributes{ - Group: "Constraints", - Description: "A list of Organizational Units names. At least one must exist in the OU field.", + Group: "Constraints", }, }, @@ -161,9 +138,6 @@ At least one must exist in the OU field.`, Description: `A comma-separated string or array of extensions formatted as "oid:value". Expects the extension value to be some type of ASN1 encoded string. All values much match. Supports globbing on "value".`, - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of extensions formatted as 'oid:value'. Expects the extension value to be some type of ASN1 encoded string. All values much match. Supports globbing on 'value'.", - }, }, "allowed_metadata_extensions": { @@ -172,9 +146,6 @@ All values much match. Supports globbing on "value".`, Upon successful authentication, these extensions will be added as metadata if they are present in the certificate. 
The metadata key will be the string consisting of the oid numbers separated by a dash (-) instead of a dot (.) to allow usage in ACL templates.`, - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of OID extensions. Upon successful authentication, these extensions will be added as metadata if they are present in the certificate. The metadata key will be the string consisting of the OID numbers separated by a dash (-) instead of a dot (.) to allow usage in ACL templates.", - }, }, "display_name": { @@ -228,6 +199,10 @@ certificate.`, HelpSynopsis: pathCertHelpSyn, HelpDescription: pathCertHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "Certificate", + }, } tokenutil.AddTokenFields(p.Fields) diff --git a/builtin/credential/cert/path_config.go b/builtin/credential/cert/path_config.go index 6f1f2903723550..c08992af15c43e 100644 --- a/builtin/credential/cert/path_config.go +++ b/builtin/credential/cert/path_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -16,11 +13,6 @@ const maxCacheSize = 100000 func pathConfig(b *backend) *framework.Path { return &framework.Path{ Pattern: "config", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixCert, - }, - Fields: map[string]*framework.FieldSchema{ "disable_binding": { Type: framework.TypeBool, @@ -39,19 +31,9 @@ func pathConfig(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathConfigWrite, + logical.ReadOperation: b.pathConfigRead, }, } } diff --git a/builtin/credential/cert/path_crls.go b/builtin/credential/cert/path_crls.go index 9dd710758cef15..75edcc32c46dbd 100644 --- a/builtin/credential/cert/path_crls.go +++ b/builtin/credential/cert/path_crls.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -22,10 +19,6 @@ import ( func pathListCRLs(b *backend) *framework.Path { return &framework.Path{ Pattern: "crls/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixCert, - OperationSuffix: "crls", - }, Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathCRLsList, @@ -48,12 +41,6 @@ func (b *backend) pathCRLsList(ctx context.Context, req *logical.Request, d *fra func pathCRLs(b *backend) *framework.Path { return &framework.Path{ Pattern: "crls/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixCert, - OperationSuffix: "crl", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/credential/cert/path_crls_test.go b/builtin/credential/cert/path_crls_test.go index 24211f5cad5bb9..9ca1f1243c19c5 100644 --- a/builtin/credential/cert/path_crls_test.go +++ b/builtin/credential/cert/path_crls_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
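// Illustrative sketch only, not part of the diff above: the two equivalent ways of wiring
// handlers that these hunks switch between. Operations wraps each handler in a
// PathOperation (which is also what carries per-operation DisplayAttrs); the older
// Callbacks map registers the bare functions. The handler below is a hypothetical no-op.
package example

import (
	"context"

	"github.com/hashicorp/vault/sdk/framework"
	"github.com/hashicorp/vault/sdk/logical"
)

func noopRead(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
	return nil, nil
}

func withOperations() *framework.Path {
	return &framework.Path{
		Pattern: "config",
		Operations: map[logical.Operation]framework.OperationHandler{
			logical.ReadOperation: &framework.PathOperation{Callback: noopRead},
		},
	}
}

func withCallbacks() *framework.Path {
	return &framework.Path{
		Pattern: "config",
		Callbacks: map[logical.Operation]framework.OperationFunc{
			logical.ReadOperation: noopRead,
		},
	}
}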
-// SPDX-License-Identifier: MPL-2.0 - package cert import ( diff --git a/builtin/credential/cert/path_login.go b/builtin/credential/cert/path_login.go index d59c5b4a9195da..59c48e76f28d29 100644 --- a/builtin/credential/cert/path_login.go +++ b/builtin/credential/cert/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -15,15 +12,14 @@ import ( "fmt" "strings" + "github.com/hashicorp/vault/sdk/helper/ocsp" + "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/helper/certutil" - "github.com/hashicorp/vault/sdk/helper/cidrutil" - "github.com/hashicorp/vault/sdk/helper/ocsp" "github.com/hashicorp/vault/sdk/helper/policyutil" "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/errwrap" - "github.com/hashicorp/go-multierror" + "github.com/hashicorp/vault/sdk/helper/cidrutil" glob "github.com/ryanuber/go-glob" ) @@ -36,10 +32,6 @@ type ParsedCert struct { func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: "login", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixCert, - OperationVerb: "login", - }, Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -272,7 +264,6 @@ func (b *backend) verifyCredentials(ctx context.Context, req *logical.Request, d // If trustedNonCAs is not empty it means that client had registered a non-CA cert // with the backend. - var retErr error if len(trustedNonCAs) != 0 { for _, trustedNonCA := range trustedNonCAs { tCert := trustedNonCA.Certificates[0] @@ -280,19 +271,9 @@ func (b *backend) verifyCredentials(ctx context.Context, req *logical.Request, d if tCert.SerialNumber.Cmp(clientCert.SerialNumber) == 0 && bytes.Equal(tCert.AuthorityKeyId, clientCert.AuthorityKeyId) { matches, err := b.matchesConstraints(ctx, clientCert, trustedNonCA.Certificates, trustedNonCA, verifyConf) - - // matchesConstraints returns an error when OCSP verification fails, - // but some other path might still give us success. Add to the - // retErr multierror, but avoid duplicates. This way, if we reach a - // failure later, we can give additional context. - // - // XXX: If matchesConstraints is updated to generate additional, - // immediately fatal errors, we likely need to extend it to return - // another boolean (fatality) or other detection scheme. 
- if err != nil && (retErr == nil || !errwrap.Contains(retErr, err.Error())) { - retErr = multierror.Append(retErr, err) + if err != nil { + return nil, nil, err } - if matches { return trustedNonCA, nil, nil } @@ -303,36 +284,23 @@ func (b *backend) verifyCredentials(ctx context.Context, req *logical.Request, d // If no trusted chain was found, client is not authenticated // This check happens after checking for a matching configured non-CA certs if len(trustedChains) == 0 { - if retErr == nil { - return nil, logical.ErrorResponse(fmt.Sprintf("invalid certificate or no client certificate supplied; additionally got errors during verification: %v", retErr)), nil - } return nil, logical.ErrorResponse("invalid certificate or no client certificate supplied"), nil } // Search for a ParsedCert that intersects with the validated chains and any additional constraints + matches := make([]*ParsedCert, 0) for _, trust := range trusted { // For each ParsedCert in the config for _, tCert := range trust.Certificates { // For each certificate in the entry for _, chain := range trustedChains { // For each root chain that we matched for _, cCert := range chain { // For each cert in the matched chain if tCert.Equal(cCert) { // ParsedCert intersects with matched chain match, err := b.matchesConstraints(ctx, clientCert, chain, trust, verifyConf) // validate client cert + matched chain against the config - - // See note above. - if err != nil && (retErr == nil || !errwrap.Contains(retErr, err.Error())) { - retErr = multierror.Append(retErr, err) + if err != nil { + return nil, nil, err } - - // Return the first matching entry (for backwards - // compatibility, we continue to just pick the first - // one if we have multiple matches). - // - // Here, we return directly: this means that any - // future OCSP errors would be ignored; in the future, - // if these become fatal, we could revisit this - // choice and choose the first match after evaluating - // all possible candidates. - if match && err == nil { - return trust, nil, nil + if match { + // Add the match to the list + matches = append(matches, trust) } } } @@ -340,11 +308,13 @@ func (b *backend) verifyCredentials(ctx context.Context, req *logical.Request, d } } - if retErr != nil { - return nil, logical.ErrorResponse(fmt.Sprintf("no chain matching all constraints could be found for this login certificate; additionally got errors during verification: %v", retErr)), nil + // Fail on no matches + if len(matches) == 0 { + return nil, logical.ErrorResponse("no chain matching all constraints could be found for this login certificate"), nil } - return nil, logical.ErrorResponse("no chain matching all constraints could be found for this login certificate"), nil + // Return the first matching entry (for backwards compatibility, we continue to just pick one if multiple match) + return matches[0], nil, nil } func (b *backend) matchesConstraints(ctx context.Context, clientCert *x509.Certificate, trustedChain []*x509.Certificate, @@ -631,12 +601,6 @@ func (b *backend) checkForCertInOCSP(ctx context.Context, clientCert *x509.Certi defer b.ocspClientMutex.RUnlock() err := b.ocspClient.VerifyLeafCertificate(ctx, clientCert, chain[1], conf) if err != nil { - // We want to preserve error messages when they have additional, - // potentially useful information. Just having a revoked cert - // isn't additionally useful. 
- if !strings.Contains(err.Error(), "has been revoked") { - return false, err - } return false, nil } return true, nil diff --git a/builtin/credential/cert/path_login_test.go b/builtin/credential/cert/path_login_test.go index d86bd31bd92eba..a2d6f2e292a2f3 100644 --- a/builtin/credential/cert/path_login_test.go +++ b/builtin/credential/cert/path_login_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( diff --git a/builtin/credential/cert/test_responder.go b/builtin/credential/cert/test_responder.go index d68ebe080e0837..1c7c75b2ff339d 100644 --- a/builtin/credential/cert/test_responder.go +++ b/builtin/credential/cert/test_responder.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Package ocsp implements an OCSP responder based on a generic storage backend. // It provides a couple of sample implementations. // Because OCSP responders handle high query volumes, we have to be careful diff --git a/builtin/credential/github/backend.go b/builtin/credential/github/backend.go index f8bbcc403c61d2..89ce37c7cd6d0f 100644 --- a/builtin/credential/github/backend.go +++ b/builtin/credential/github/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package github import ( @@ -14,8 +11,6 @@ import ( "golang.org/x/oauth2" ) -const operationPrefixGithub = "github" - func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() if err := b.Setup(ctx, conf); err != nil { @@ -33,17 +28,6 @@ func Backend() *backend { DefaultKey: "default", } - teamMapPaths := b.TeamMap.Paths() - - teamMapPaths[0].DisplayAttrs = &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - OperationSuffix: "teams", - } - teamMapPaths[1].DisplayAttrs = &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - OperationSuffix: "team-mapping", - } - b.UserMap = &framework.PolicyMap{ PathMap: framework.PathMap{ Name: "users", @@ -51,18 +35,7 @@ func Backend() *backend { DefaultKey: "default", } - userMapPaths := b.UserMap.Paths() - - userMapPaths[0].DisplayAttrs = &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - OperationSuffix: "users", - } - userMapPaths[1].DisplayAttrs = &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - OperationSuffix: "user-mapping", - } - - allPaths := append(teamMapPaths, userMapPaths...) + allPaths := append(b.TeamMap.Paths(), b.UserMap.Paths()...) b.Backend = &framework.Backend{ Help: backendHelp, diff --git a/builtin/credential/github/backend_test.go b/builtin/credential/github/backend_test.go index 6ea08ee58134ef..f3360f52cfb57d 100644 --- a/builtin/credential/github/backend_test.go +++ b/builtin/credential/github/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package github import ( diff --git a/builtin/credential/github/cli.go b/builtin/credential/github/cli.go index d40f1b56d9e7e5..bccc6fa516e2d7 100644 --- a/builtin/credential/github/cli.go +++ b/builtin/credential/github/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
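// Illustrative sketch only, not part of the diff above: the error-handling pattern the
// pre-backport cert login code used and this hunk removes. Instead of failing on the
// first candidate whose constraint check errors, it keeps trying candidates and
// accumulates distinct errors so the final response can report them all. The check
// function and inputs here are hypothetical.
package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/errwrap"
	multierror "github.com/hashicorp/go-multierror"
)

func firstMatch(candidates []string, matches func(string) (bool, error)) (string, error) {
	var retErr error
	for _, c := range candidates {
		ok, err := matches(c)
		// Accumulate, but avoid stacking duplicates of the same message.
		if err != nil && (retErr == nil || !errwrap.Contains(retErr, err.Error())) {
			retErr = multierror.Append(retErr, err)
		}
		if ok && err == nil {
			return c, nil
		}
	}
	return "", retErr
}

func main() {
	_, err := firstMatch([]string{"a", "b"}, func(string) (bool, error) {
		return false, errors.New("ocsp check failed")
	})
	fmt.Println(err) // reports "ocsp check failed" once, not twice
}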
-// SPDX-License-Identifier: MPL-2.0 - package github import ( diff --git a/builtin/credential/github/cmd/github/main.go b/builtin/credential/github/cmd/github/main.go index 499469a0f68133..be4fbb64ca65fc 100644 --- a/builtin/credential/github/cmd/github/main.go +++ b/builtin/credential/github/cmd/github/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: github.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/github/path_config.go b/builtin/credential/github/path_config.go index 83238f32c49b43..707115c567e339 100644 --- a/builtin/credential/github/path_config.go +++ b/builtin/credential/github/path_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package github import ( @@ -20,11 +17,6 @@ import ( func pathConfig(b *backend) *framework.Path { p := &framework.Path{ Pattern: "config", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - }, - Fields: map[string]*framework.FieldSchema{ "organization": { Type: framework.TypeString, @@ -57,20 +49,9 @@ API-compatible authentication server.`, }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - OperationVerb: "configure", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathConfigWrite, + logical.ReadOperation: b.pathConfigRead, }, } diff --git a/builtin/credential/github/path_config_test.go b/builtin/credential/github/path_config_test.go index 2f592b21f7b8cc..d59599f3262096 100644 --- a/builtin/credential/github/path_config_test.go +++ b/builtin/credential/github/path_config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package github import ( diff --git a/builtin/credential/github/path_login.go b/builtin/credential/github/path_login.go index 4ee94e5d5c6707..252b5641cd4fe4 100644 --- a/builtin/credential/github/path_login.go +++ b/builtin/credential/github/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package github import ( @@ -19,12 +16,6 @@ import ( func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: "login", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixGithub, - OperationVerb: "login", - }, - Fields: map[string]*framework.FieldSchema{ "token": { Type: framework.TypeString, diff --git a/builtin/credential/github/path_login_test.go b/builtin/credential/github/path_login_test.go index 282e3fa9401df6..25baf7f811e8a8 100644 --- a/builtin/credential/github/path_login_test.go +++ b/builtin/credential/github/path_login_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package github import ( diff --git a/builtin/credential/ldap/backend.go b/builtin/credential/ldap/backend.go index ffc1d1ee8c39fc..35e0f102c3961e 100644 --- a/builtin/credential/ldap/backend.go +++ b/builtin/credential/ldap/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -14,10 +11,7 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const ( - operationPrefixLDAP = "ldap" - errUserBindFailed = "ldap operation failed: failed to bind as user" -) +const errUserBindFailed = `ldap operation failed: failed to bind as user` func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() diff --git a/builtin/credential/ldap/backend_test.go b/builtin/credential/ldap/backend_test.go index beda248e4b0905..74dfdf99ed074e 100644 --- a/builtin/credential/ldap/backend_test.go +++ b/builtin/credential/ldap/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -1242,7 +1239,6 @@ func TestLdapAuthBackend_ConfigUpgrade(t *testing.T) { RequestTimeout: cfg.RequestTimeout, ConnectionTimeout: cfg.ConnectionTimeout, UsernameAsAlias: false, - DerefAliases: "never", MaximumPageSize: 1000, }, } diff --git a/builtin/credential/ldap/cli.go b/builtin/credential/ldap/cli.go index 772603434940a2..e0d744b4caadbe 100644 --- a/builtin/credential/ldap/cli.go +++ b/builtin/credential/ldap/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -29,15 +26,12 @@ func (h *CLIHandler) Auth(c *api.Client, m map[string]string) (*api.Secret, erro } password, ok := m["password"] if !ok { - password = passwordFromEnv() - if password == "" { - fmt.Fprintf(os.Stderr, "Password (will be hidden): ") - var err error - password, err = pwd.Read(os.Stdin) - fmt.Fprintf(os.Stderr, "\n") - if err != nil { - return nil, err - } + fmt.Fprintf(os.Stderr, "Password (will be hidden): ") + var err error + password, err = pwd.Read(os.Stdin) + fmt.Fprintf(os.Stderr, "\n") + if err != nil { + return nil, err } } @@ -76,9 +70,8 @@ Usage: vault login -method=ldap [CONFIG K=V...] Configuration: password= - LDAP password to use for authentication. If not provided, it will use - the VAULT_LDAP_PASSWORD environment variable. If this is not set, the - CLI will prompt for this on stdin. + LDAP password to use for authentication. If not provided, the CLI will + prompt for this on stdin. username= LDAP username to use for authentication. 
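// Illustrative sketch only, not part of the diff above: the lookup order the pre-backport
// LDAP CLI used for the password, which this hunk removes: explicit k=v argument, then the
// VAULT_LDAP_PASSWORD environment variable, then an interactive prompt. pwd.Read is
// assumed here to be github.com/hashicorp/go-secure-stdlib/password.Read.
package main

import (
	"fmt"
	"os"

	pwd "github.com/hashicorp/go-secure-stdlib/password"
)

func ldapPassword(m map[string]string) (string, error) {
	if p, ok := m["password"]; ok {
		return p, nil
	}
	if p := os.Getenv("VAULT_LDAP_PASSWORD"); p != "" {
		return p, nil
	}
	fmt.Fprintf(os.Stderr, "Password (will be hidden): ")
	p, err := pwd.Read(os.Stdin)
	fmt.Fprintf(os.Stderr, "\n")
	return p, err
}

func main() {
	p, err := ldapPassword(map[string]string{})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	_ = p
}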
@@ -96,7 +89,3 @@ func usernameFromEnv() string { } return "" } - -func passwordFromEnv() string { - return os.Getenv("VAULT_LDAP_PASSWORD") -} diff --git a/builtin/credential/ldap/cmd/ldap/main.go b/builtin/credential/ldap/cmd/ldap/main.go index 2dcb802e209274..b632c011ce136a 100644 --- a/builtin/credential/ldap/cmd/ldap/main.go +++ b/builtin/credential/ldap/cmd/ldap/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: ldap.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/ldap/path_config.go b/builtin/credential/ldap/path_config.go index a06c6666f8857b..45e5294c79d932 100644 --- a/builtin/credential/ldap/path_config.go +++ b/builtin/credential/ldap/path_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -19,31 +16,18 @@ const userFilterWarning = "userfilter configured does not consider userattr and func pathConfig(b *backend) *framework.Path { p := &framework.Path{ Pattern: `config`, + Fields: ldaputil.ConfigFields(), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixLDAP, - Action: "Configure", - }, - - Fields: ldaputil.ConfigFields(), - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "auth-configuration", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure-auth", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathConfigRead, + logical.UpdateOperation: b.pathConfigWrite, }, HelpSynopsis: pathConfigHelpSyn, HelpDescription: pathConfigHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Configure", + }, } tokenutil.AddTokenFields(p.Fields) diff --git a/builtin/credential/ldap/path_groups.go b/builtin/credential/ldap/path_groups.go index 08ac00d7fd2c2b..b39691cf8174f2 100644 --- a/builtin/credential/ldap/path_groups.go +++ b/builtin/credential/ldap/path_groups.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -16,33 +13,22 @@ func pathGroupsList(b *backend) *framework.Path { return &framework.Path{ Pattern: "groups/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixLDAP, - OperationSuffix: "groups", - Navigation: true, - ItemType: "Group", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathGroupList, }, HelpSynopsis: pathGroupHelpSyn, HelpDescription: pathGroupHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "Group", + }, } } func pathGroups(b *backend) *framework.Path { return &framework.Path{ Pattern: `groups/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixLDAP, - OperationSuffix: "group", - Action: "Create", - ItemType: "Group", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -52,9 +38,6 @@ func pathGroups(b *backend) *framework.Path { "policies": { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of policies associated to the group.", - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of policies associated to the group.", - }, }, }, @@ -66,6 +49,10 @@ func pathGroups(b *backend) *framework.Path { HelpSynopsis: pathGroupHelpSyn, HelpDescription: pathGroupHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "Group", + }, } } diff --git a/builtin/credential/ldap/path_login.go b/builtin/credential/ldap/path_login.go index 440e7253992128..67303911e5a178 100644 --- a/builtin/credential/ldap/path_login.go +++ b/builtin/credential/ldap/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -16,12 +13,6 @@ import ( func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: `login/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixLDAP, - OperationVerb: "login", - }, - Fields: map[string]*framework.FieldSchema{ "username": { Type: framework.TypeString, diff --git a/builtin/credential/ldap/path_users.go b/builtin/credential/ldap/path_users.go index 1ce252d1df5586..a4e18d30eb6d8f 100644 --- a/builtin/credential/ldap/path_users.go +++ b/builtin/credential/ldap/path_users.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ldap import ( @@ -17,33 +14,22 @@ func pathUsersList(b *backend) *framework.Path { return &framework.Path{ Pattern: "users/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixLDAP, - OperationSuffix: "users", - Navigation: true, - ItemType: "User", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathUserList, }, HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "User", + }, } } func pathUsers(b *backend) *framework.Path { return &framework.Path{ Pattern: `users/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixLDAP, - OperationSuffix: "user", - Action: "Create", - ItemType: "User", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -53,17 +39,11 @@ func pathUsers(b *backend) *framework.Path { "groups": { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of additional groups associated with the user.", - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of additional groups associated with the user.", - }, }, "policies": { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of policies associated with the user.", - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of policies associated with the user.", - }, }, }, @@ -75,6 +55,10 @@ func pathUsers(b *backend) *framework.Path { HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "User", + }, } } diff --git a/builtin/credential/okta/backend.go b/builtin/credential/okta/backend.go index 04dba968ab36f8..58ba6b523f9da5 100644 --- a/builtin/credential/okta/backend.go +++ b/builtin/credential/okta/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package okta import ( @@ -18,9 +15,8 @@ import ( ) const ( - operationPrefixOkta = "okta" - mfaPushMethod = "push" - mfaTOTPMethod = "token:software:totp" + mfaPushMethod = "push" + mfaTOTPMethod = "token:software:totp" ) func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { diff --git a/builtin/credential/okta/backend_test.go b/builtin/credential/okta/backend_test.go index 85642e802a4983..749b511eb0f99b 100644 --- a/builtin/credential/okta/backend_test.go +++ b/builtin/credential/okta/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package okta import ( diff --git a/builtin/credential/okta/cli.go b/builtin/credential/okta/cli.go index df252960a2c22d..d2f50d5688e22d 100644 --- a/builtin/credential/okta/cli.go +++ b/builtin/credential/okta/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package okta import ( diff --git a/builtin/credential/okta/cmd/okta/main.go b/builtin/credential/okta/cmd/okta/main.go index e28b34a016c8c3..e2452ba4b8ad9e 100644 --- a/builtin/credential/okta/cmd/okta/main.go +++ b/builtin/credential/okta/cmd/okta/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: okta.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/okta/path_config.go b/builtin/credential/okta/path_config.go index 045d4fdb377922..7fc93efb87c7d2 100644 --- a/builtin/credential/okta/path_config.go +++ b/builtin/credential/okta/path_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package okta import ( @@ -27,12 +24,6 @@ const ( func pathConfig(b *backend) *framework.Path { p := &framework.Path{ Pattern: `config`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - Action: "Configure", - }, - Fields: map[string]*framework.FieldSchema{ "organization": { Type: framework.TypeString, @@ -89,30 +80,18 @@ func pathConfig(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "configuration", - }, - }, - logical.CreateOperation: &framework.PathOperation{ - Callback: b.pathConfigWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathConfigRead, + logical.CreateOperation: b.pathConfigWrite, + logical.UpdateOperation: b.pathConfigWrite, }, ExistenceCheck: b.pathConfigExistenceCheck, HelpSynopsis: pathConfigHelp, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Configure", + }, } tokenutil.AddTokenFields(p.Fields) diff --git a/builtin/credential/okta/path_groups.go b/builtin/credential/okta/path_groups.go index 753c1cacdd5174..f9ff0225ac98c0 100644 --- a/builtin/credential/okta/path_groups.go +++ b/builtin/credential/okta/path_groups.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package okta import ( @@ -16,33 +13,22 @@ func pathGroupsList(b *backend) *framework.Path { return &framework.Path{ Pattern: "groups/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - OperationSuffix: "groups", - Navigation: true, - ItemType: "Group", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathGroupList, }, HelpSynopsis: pathGroupHelpSyn, HelpDescription: pathGroupHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "Group", + }, } } func pathGroups(b *backend) *framework.Path { return &framework.Path{ Pattern: `groups/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - OperationSuffix: "group", - Action: "Create", - ItemType: "Group", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -52,9 +38,6 @@ func pathGroups(b *backend) *framework.Path { "policies": { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of policies associated to the group.", - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of policies associated to the group.", - }, }, }, @@ -66,6 +49,10 @@ func pathGroups(b *backend) *framework.Path { HelpSynopsis: pathGroupHelpSyn, HelpDescription: pathGroupHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "Group", + }, } } diff --git a/builtin/credential/okta/path_groups_test.go b/builtin/credential/okta/path_groups_test.go index 8e4ba8cc2d5d6c..84253f379fd8ae 100644 --- a/builtin/credential/okta/path_groups_test.go +++ b/builtin/credential/okta/path_groups_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package okta import ( diff --git a/builtin/credential/okta/path_login.go b/builtin/credential/okta/path_login.go index 1f2cb090a76c57..0f8967576bb7c1 100644 --- a/builtin/credential/okta/path_login.go +++ b/builtin/credential/okta/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package okta import ( @@ -23,12 +20,6 @@ const ( func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: `login/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - OperationVerb: "login", - }, - Fields: map[string]*framework.FieldSchema{ "username": { Type: framework.TypeString, @@ -198,10 +189,6 @@ func (b *backend) pathLoginRenew(ctx context.Context, req *logical.Request, d *f func pathVerify(b *backend) *framework.Path { return &framework.Path{ Pattern: `verify/(?P.+)`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - OperationVerb: "verify", - }, Fields: map[string]*framework.FieldSchema{ "nonce": { Type: framework.TypeString, diff --git a/builtin/credential/okta/path_users.go b/builtin/credential/okta/path_users.go index 3c38e855548b3a..bd5fdc0ebbe0d8 100644 --- a/builtin/credential/okta/path_users.go +++ b/builtin/credential/okta/path_users.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package okta import ( @@ -14,33 +11,22 @@ func pathUsersList(b *backend) *framework.Path { return &framework.Path{ Pattern: "users/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - OperationSuffix: "users", - Navigation: true, - ItemType: "User", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathUserList, }, HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "User", + }, } } func pathUsers(b *backend) *framework.Path { return &framework.Path{ Pattern: `users/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixOkta, - OperationSuffix: "user", - Action: "Create", - ItemType: "User", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -66,6 +52,10 @@ func pathUsers(b *backend) *framework.Path { HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "User", + }, } } diff --git a/builtin/credential/radius/backend.go b/builtin/credential/radius/backend.go index 3ec37a6fe634d1..03da06efd9bbf5 100644 --- a/builtin/credential/radius/backend.go +++ b/builtin/credential/radius/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package radius import ( @@ -10,8 +7,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const operationPrefixRadius = "radius" - func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() if err := b.Setup(ctx, conf); err != nil { diff --git a/builtin/credential/radius/backend_test.go b/builtin/credential/radius/backend_test.go index 9e9567f470645e..17cf54367c2573 100644 --- a/builtin/credential/radius/backend_test.go +++ b/builtin/credential/radius/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package radius import ( @@ -8,14 +5,13 @@ import ( "fmt" "os" "reflect" - "runtime" "strconv" "strings" "testing" "time" + "github.com/hashicorp/vault/helper/testhelpers/docker" logicaltest "github.com/hashicorp/vault/helper/testhelpers/logical" - "github.com/hashicorp/vault/sdk/helper/docker" "github.com/hashicorp/vault/sdk/logical" ) @@ -31,10 +27,6 @@ const ( ) func prepareRadiusTestContainer(t *testing.T) (func(), string, int) { - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - if os.Getenv(envRadiusRadiusHost) != "" { port, _ := strconv.Atoi(os.Getenv(envRadiusPort)) return func() {}, os.Getenv(envRadiusRadiusHost), port diff --git a/builtin/credential/radius/cmd/radius/main.go b/builtin/credential/radius/cmd/radius/main.go index b3045a31a1a734..9ab5a636948c7b 100644 --- a/builtin/credential/radius/cmd/radius/main.go +++ b/builtin/credential/radius/cmd/radius/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: radius.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/radius/path_config.go b/builtin/credential/radius/path_config.go index 6bdc2967361d28..33d4d0d99175d7 100644 --- a/builtin/credential/radius/path_config.go +++ b/builtin/credential/radius/path_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package radius import ( @@ -15,12 +12,6 @@ import ( func pathConfig(b *backend) *framework.Path { p := &framework.Path{ Pattern: "config", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRadius, - Action: "Configure", - }, - Fields: map[string]*framework.FieldSchema{ "host": { Type: framework.TypeString, @@ -44,10 +35,9 @@ func pathConfig(b *backend) *framework.Path { "unregistered_user_policies": { Type: framework.TypeString, Default: "", - Description: "Comma-separated list of policies to grant upon successful RADIUS authentication of an unregistered user (default: empty)", + Description: "Comma-separated list of policies to grant upon successful RADIUS authentication of an unregisted user (default: empty)", DisplayAttrs: &framework.DisplayAttributes{ - Name: "Policies for unregistered users", - Description: "List of policies to grant upon successful RADIUS authentication of an unregistered user (default: empty)", + Name: "Policies for unregistered users", }, }, "dial_timeout": { @@ -87,29 +77,17 @@ func pathConfig(b *backend) *framework.Path { ExistenceCheck: b.configExistenceCheck, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "configuration", - }, - }, - logical.CreateOperation: &framework.PathOperation{ - Callback: b.pathConfigCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigCreateUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathConfigRead, + logical.CreateOperation: b.pathConfigCreateUpdate, + logical.UpdateOperation: b.pathConfigCreateUpdate, }, HelpSynopsis: pathConfigHelpSyn, HelpDescription: pathConfigHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Configure", + }, } tokenutil.AddTokenFields(p.Fields) diff --git a/builtin/credential/radius/path_login.go b/builtin/credential/radius/path_login.go index 6feaf1bfcaf401..c8a1ab8f43ed90 100644 --- a/builtin/credential/radius/path_login.go +++ b/builtin/credential/radius/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package radius import ( @@ -23,13 +20,6 @@ import ( func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: "login" + framework.OptionalParamRegex("urlusername"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRadius, - OperationVerb: "login", - OperationSuffix: "|with-username", - }, - Fields: map[string]*framework.FieldSchema{ "urlusername": { Type: framework.TypeString, diff --git a/builtin/credential/radius/path_users.go b/builtin/credential/radius/path_users.go index 63ac5bbc2e8781..de7b5d4690e6ec 100644 --- a/builtin/credential/radius/path_users.go +++ b/builtin/credential/radius/path_users.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package radius import ( @@ -17,33 +14,22 @@ func pathUsersList(b *backend) *framework.Path { return &framework.Path{ Pattern: "users/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRadius, - OperationSuffix: "users", - Navigation: true, - ItemType: "User", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathUserList, }, HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "User", + }, } } func pathUsers(b *backend) *framework.Path { return &framework.Path{ Pattern: `users/(?P.+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRadius, - OperationSuffix: "user", - Action: "Create", - ItemType: "User", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -53,9 +39,6 @@ func pathUsers(b *backend) *framework.Path { "policies": { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of policies associated to the user.", - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of policies associated to the user.", - }, }, }, @@ -70,6 +53,10 @@ func pathUsers(b *backend) *framework.Path { HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "User", + }, } } diff --git a/builtin/credential/token/cli.go b/builtin/credential/token/cli.go index 853d6eade7c453..64a88169cbe7eb 100644 --- a/builtin/credential/token/cli.go +++ b/builtin/credential/token/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/builtin/credential/userpass/backend.go b/builtin/credential/userpass/backend.go index 428e8b25ca7663..aa45dc3766db23 100644 --- a/builtin/credential/userpass/backend.go +++ b/builtin/credential/userpass/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( @@ -10,8 +7,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const operationPrefixUserpass = "userpass" - func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() if err := b.Setup(ctx, conf); err != nil { diff --git a/builtin/credential/userpass/backend_test.go b/builtin/credential/userpass/backend_test.go index 3df8cfa2a9bb5a..83f79db9a4e1e1 100644 --- a/builtin/credential/userpass/backend_test.go +++ b/builtin/credential/userpass/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package userpass import ( diff --git a/builtin/credential/userpass/cli.go b/builtin/credential/userpass/cli.go index e100ae9f244e53..092d0927ef1f87 100644 --- a/builtin/credential/userpass/cli.go +++ b/builtin/credential/userpass/cli.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( diff --git a/builtin/credential/userpass/cmd/userpass/main.go b/builtin/credential/userpass/cmd/userpass/main.go index 4747a56f440974..5ea1894d219e54 100644 --- a/builtin/credential/userpass/cmd/userpass/main.go +++ b/builtin/credential/userpass/cmd/userpass/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -19,11 +16,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: userpass.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/credential/userpass/path_login.go b/builtin/credential/userpass/path_login.go index 0565864471e024..f41463f6061296 100644 --- a/builtin/credential/userpass/path_login.go +++ b/builtin/credential/userpass/path_login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( @@ -19,12 +16,6 @@ import ( func pathLogin(b *backend) *framework.Path { return &framework.Path{ Pattern: "login/" + framework.GenericNameRegex("username"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixUserpass, - OperationVerb: "login", - }, - Fields: map[string]*framework.FieldSchema{ "username": { Type: framework.TypeString, diff --git a/builtin/credential/userpass/path_user_password.go b/builtin/credential/userpass/path_user_password.go index 63b52ca0ca1a4f..5007497304789f 100644 --- a/builtin/credential/userpass/path_user_password.go +++ b/builtin/credential/userpass/path_user_password.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( @@ -16,13 +13,6 @@ import ( func pathUserPassword(b *backend) *framework.Path { return &framework.Path{ Pattern: "users/" + framework.GenericNameRegex("username") + "/password$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixUserpass, - OperationVerb: "reset", - OperationSuffix: "password", - }, - Fields: map[string]*framework.FieldSchema{ "username": { Type: framework.TypeString, diff --git a/builtin/credential/userpass/path_user_policies.go b/builtin/credential/userpass/path_user_policies.go index 8c5a9174ae966b..3c017253869e2b 100644 --- a/builtin/credential/userpass/path_user_policies.go +++ b/builtin/credential/userpass/path_user_policies.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package userpass import ( @@ -16,13 +13,6 @@ import ( func pathUserPolicies(b *backend) *framework.Path { return &framework.Path{ Pattern: "users/" + framework.GenericNameRegex("username") + "/policies$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixUserpass, - OperationVerb: "update", - OperationSuffix: "policies", - }, - Fields: map[string]*framework.FieldSchema{ "username": { Type: framework.TypeString, @@ -36,9 +26,6 @@ func pathUserPolicies(b *backend) *framework.Path { "token_policies": { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of policies", - DisplayAttrs: &framework.DisplayAttributes{ - Description: "A list of policies that will apply to the generated token for this user.", - }, }, }, diff --git a/builtin/credential/userpass/path_users.go b/builtin/credential/userpass/path_users.go index 221fc2c4fcbf3d..7ec22c5fbd4556 100644 --- a/builtin/credential/userpass/path_users.go +++ b/builtin/credential/userpass/path_users.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( @@ -19,33 +16,22 @@ func pathUsersList(b *backend) *framework.Path { return &framework.Path{ Pattern: "users/?", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixUserpass, - OperationSuffix: "users", - Navigation: true, - ItemType: "User", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathUserList, }, HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Navigation: true, + ItemType: "User", + }, } } func pathUsers(b *backend) *framework.Path { p := &framework.Path{ Pattern: "users/" + framework.GenericNameRegex("username"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixUserpass, - OperationSuffix: "user", - Action: "Create", - ItemType: "User", - }, - Fields: map[string]*framework.FieldSchema{ "username": { Type: framework.TypeString, @@ -96,6 +82,10 @@ func pathUsers(b *backend) *framework.Path { HelpSynopsis: pathUserHelpSyn, HelpDescription: pathUserHelpDesc, + DisplayAttrs: &framework.DisplayAttributes{ + Action: "Create", + ItemType: "User", + }, } tokenutil.AddTokenFields(p.Fields) diff --git a/builtin/credential/userpass/stepwise_test.go b/builtin/credential/userpass/stepwise_test.go index ab797ed200c963..6f7000f58dc581 100644 --- a/builtin/credential/userpass/stepwise_test.go +++ b/builtin/credential/userpass/stepwise_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package userpass import ( diff --git a/builtin/logical/aws/backend.go b/builtin/logical/aws/backend.go index d93c394f98fd13..9c5abe1e82e80c 100644 --- a/builtin/logical/aws/backend.go +++ b/builtin/logical/aws/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -12,29 +9,24 @@ import ( "github.com/aws/aws-sdk-go/service/iam/iamiface" "github.com/aws/aws-sdk-go/service/sts/stsiface" "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/sdk/queue" ) const ( rootConfigPath = "config/root" minAwsUserRollbackAge = 5 * time.Minute - operationPrefixAWS = "aws" - operationPrefixAWSASD = "aws-config" ) func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { - b := Backend(conf) + b := Backend() if err := b.Setup(ctx, conf); err != nil { return nil, err } return b, nil } -func Backend(conf *logical.BackendConfig) *backend { +func Backend() *backend { var b backend - b.credRotationQueue = queue.New() b.Backend = &framework.Backend{ Help: strings.TrimSpace(backendHelp), @@ -43,8 +35,7 @@ func Backend(conf *logical.BackendConfig) *backend { framework.WALPrefix, }, SealWrapStorage: []string{ - rootConfigPath, - pathStaticCreds + "/", + "config/root", }, }, @@ -54,8 +45,6 @@ func Backend(conf *logical.BackendConfig) *backend { pathConfigLease(&b), pathRoles(&b), pathListRoles(&b), - pathStaticRoles(&b), - pathStaticCredentials(&b), pathUser(&b), }, @@ -66,17 +55,7 @@ func Backend(conf *logical.BackendConfig) *backend { Invalidate: b.invalidate, WALRollback: b.walRollback, WALRollbackMinAge: minAwsUserRollbackAge, - PeriodicFunc: func(ctx context.Context, req *logical.Request) error { - repState := conf.System.ReplicationState() - if (conf.System.LocalMount() || - !repState.HasState(consts.ReplicationPerformanceSecondary)) && - !repState.HasState(consts.ReplicationDRSecondary) && - !repState.HasState(consts.ReplicationPerformanceStandby) { - return b.rotateExpiredStaticCreds(ctx, req) - } - return nil - }, - BackendType: logical.TypeLogical, + BackendType: logical.TypeLogical, } return &b @@ -95,10 +74,6 @@ type backend struct { // to enable mocking with AWS iface for tests iamClient iamiface.IAMAPI stsClient stsiface.STSAPI - - // the age of a static role's credential is tracked by a priority queue and handled - // by the PeriodicFunc - credRotationQueue *queue.PriorityQueue } const backendHelp = ` diff --git a/builtin/logical/aws/backend_test.go b/builtin/logical/aws/backend_test.go index 260bcc6d645b77..5831dfea772c77 100644 --- a/builtin/logical/aws/backend_test.go +++ b/builtin/logical/aws/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -19,7 +16,6 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/credentials" - "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/dynamodb" "github.com/aws/aws-sdk-go/service/ec2" @@ -40,7 +36,7 @@ type mockIAMClient struct { iamiface.IAMAPI } -func (m *mockIAMClient) CreateUserWithContext(_ aws.Context, input *iam.CreateUserInput, _ ...request.Option) (*iam.CreateUserOutput, error) { +func (m *mockIAMClient) CreateUser(input *iam.CreateUserInput) (*iam.CreateUserOutput, error) { return nil, awserr.New("Throttling", "", nil) } @@ -148,7 +144,7 @@ func TestBackend_throttled(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } @@ -684,19 +680,26 @@ func testAccStepRead(t *testing.T, path, name string, credentialTests []credenti } } -func testAccStepReadSTSResponse(name string, maximumTTL time.Duration) logicaltest.TestStep { +func testAccStepReadSTSResponse(name string, maximumTTL uint64) logicaltest.TestStep { return logicaltest.TestStep{ Operation: logical.ReadOperation, Path: "creds/" + name, Check: func(resp *logical.Response) error { - if resp.Secret == nil { - return fmt.Errorf("bad: nil Secret returned") + if resp.Secret != nil { + return fmt.Errorf("bad: STS tokens should return a nil secret, received: %+v", resp.Secret) } - ttl := resp.Secret.TTL - if ttl > maximumTTL { - return fmt.Errorf("bad: ttl of %d greater than maximum of %d", ttl/time.Second, maximumTTL/time.Second) + + if ttl, exists := resp.Data["ttl"]; exists { + ttlVal := ttl.(uint64) + + if ttlVal > maximumTTL { + return fmt.Errorf("bad: ttl of %d greater than maximum of %d", ttl, maximumTTL) + } + + return nil } - return nil + + return fmt.Errorf("response data missing ttl, received: %+v", resp.Data) }, } } @@ -1345,7 +1348,7 @@ func TestAcceptanceBackend_RoleDefaultSTSTTL(t *testing.T) { Steps: []logicaltest.TestStep{ testAccStepConfig(t), testAccStepWriteRole(t, "test", roleData), - testAccStepReadSTSResponse("test", time.Duration(minAwsAssumeRoleDuration)*time.Second), // allow a little slack + testAccStepReadSTSResponse("test", uint64(minAwsAssumeRoleDuration)), // allow a little slack }, Teardown: func() error { return deleteTestRole(roleName) diff --git a/builtin/logical/aws/client.go b/builtin/logical/aws/client.go index 71d24f3abb2e8c..80d839ed55edf0 100644 --- a/builtin/logical/aws/client.go +++ b/builtin/logical/aws/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( diff --git a/builtin/logical/aws/cmd/aws/main.go b/builtin/logical/aws/cmd/aws/main.go index 28de1eb3f8d93f..74f7d97a7b868b 100644 --- a/builtin/logical/aws/cmd/aws/main.go +++ b/builtin/logical/aws/cmd/aws/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: aws.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/aws/iam_policies.go b/builtin/logical/aws/iam_policies.go index 002a7389eb1613..caf79e33d310b7 100644 --- a/builtin/logical/aws/iam_policies.go +++ b/builtin/logical/aws/iam_policies.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -73,7 +70,7 @@ func (b *backend) getGroupPolicies(ctx context.Context, s logical.Storage, iamGr for _, g := range iamGroups { // Collect managed policy ARNs from the IAM Group - agp, err = iamClient.ListAttachedGroupPoliciesWithContext(ctx, &iam.ListAttachedGroupPoliciesInput{ + agp, err = iamClient.ListAttachedGroupPolicies(&iam.ListAttachedGroupPoliciesInput{ GroupName: aws.String(g), }) if err != nil { @@ -84,14 +81,14 @@ func (b *backend) getGroupPolicies(ctx context.Context, s logical.Storage, iamGr } // Collect inline policy names from the IAM Group - inlinePolicies, err = iamClient.ListGroupPoliciesWithContext(ctx, &iam.ListGroupPoliciesInput{ + inlinePolicies, err = iamClient.ListGroupPolicies(&iam.ListGroupPoliciesInput{ GroupName: aws.String(g), }) if err != nil { return nil, nil, err } for _, iP := range inlinePolicies.PolicyNames { - inlinePolicyDoc, err = iamClient.GetGroupPolicyWithContext(ctx, &iam.GetGroupPolicyInput{ + inlinePolicyDoc, err = iamClient.GetGroupPolicy(&iam.GetGroupPolicyInput{ GroupName: &g, PolicyName: iP, }) diff --git a/builtin/logical/aws/iam_policies_test.go b/builtin/logical/aws/iam_policies_test.go index 7f8f96adb87caa..ddba67f6b8bdcd 100644 --- a/builtin/logical/aws/iam_policies_test.go +++ b/builtin/logical/aws/iam_policies_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -8,7 +5,6 @@ import ( "testing" "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/iam" "github.com/aws/aws-sdk-go/service/iam/iamiface" "github.com/hashicorp/vault/sdk/logical" @@ -30,15 +26,15 @@ type mockGroupIAMClient struct { GetGroupPolicyResp iam.GetGroupPolicyOutput } -func (m mockGroupIAMClient) ListAttachedGroupPoliciesWithContext(_ aws.Context, in *iam.ListAttachedGroupPoliciesInput, _ ...request.Option) (*iam.ListAttachedGroupPoliciesOutput, error) { +func (m mockGroupIAMClient) ListAttachedGroupPolicies(in *iam.ListAttachedGroupPoliciesInput) (*iam.ListAttachedGroupPoliciesOutput, error) { return &m.ListAttachedGroupPoliciesResp, nil } -func (m mockGroupIAMClient) ListGroupPoliciesWithContext(_ aws.Context, in *iam.ListGroupPoliciesInput, _ ...request.Option) (*iam.ListGroupPoliciesOutput, error) { +func (m mockGroupIAMClient) ListGroupPolicies(in *iam.ListGroupPoliciesInput) (*iam.ListGroupPoliciesOutput, error) { return &m.ListGroupPoliciesResp, nil } -func (m mockGroupIAMClient) GetGroupPolicyWithContext(_ aws.Context, in *iam.GetGroupPolicyInput, _ ...request.Option) (*iam.GetGroupPolicyOutput, error) { +func (m mockGroupIAMClient) GetGroupPolicy(in *iam.GetGroupPolicyInput) (*iam.GetGroupPolicyOutput, error) { return &m.GetGroupPolicyResp, nil } @@ -141,7 +137,7 @@ func Test_getGroupPolicies(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } diff --git a/builtin/logical/aws/path_config_lease.go b/builtin/logical/aws/path_config_lease.go index 1b01388a3b8a6d..b953b2305e3c6b 100644 --- a/builtin/logical/aws/path_config_lease.go +++ b/builtin/logical/aws/path_config_lease.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -15,11 +12,6 @@ import ( func pathConfigLease(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/lease", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "lease": { Type: framework.TypeString, @@ -32,20 +24,9 @@ func pathConfigLease(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathLeaseRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "lease-configuration", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathLeaseWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "lease", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathLeaseRead, + logical.UpdateOperation: b.pathLeaseWrite, }, HelpSynopsis: pathConfigLeaseHelpSyn, diff --git a/builtin/logical/aws/path_config_root.go b/builtin/logical/aws/path_config_root.go index bd6c09e0e5e3be..1262980fa8066c 100644 --- a/builtin/logical/aws/path_config_root.go +++ b/builtin/logical/aws/path_config_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -17,11 +14,6 @@ const defaultUserNameTemplate = `{{ if (eq .Type "STS") }}{{ printf "vault-%s-%s func pathConfigRoot(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/root", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - }, - Fields: map[string]*framework.FieldSchema{ "access_key": { Type: framework.TypeString, @@ -56,20 +48,9 @@ func pathConfigRoot(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigRootRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "root-iam-credentials-configuration", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigRootWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "root-iam-credentials", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathConfigRootRead, + logical.UpdateOperation: b.pathConfigRootWrite, }, HelpSynopsis: pathConfigRootHelpSyn, diff --git a/builtin/logical/aws/path_config_root_test.go b/builtin/logical/aws/path_config_root_test.go index a0070649041791..d90ee6cacb38d9 100644 --- a/builtin/logical/aws/path_config_root_test.go +++ b/builtin/logical/aws/path_config_root_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -15,7 +12,7 @@ func TestBackend_PathConfigRoot(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } diff --git a/builtin/logical/aws/path_config_rotate_root.go b/builtin/logical/aws/path_config_rotate_root.go index 0434d22e54c8c5..1f7ca311336614 100644 --- a/builtin/logical/aws/path_config_rotate_root.go +++ b/builtin/logical/aws/path_config_rotate_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -16,13 +13,6 @@ import ( func pathConfigRotateRoot(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/rotate-root", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "root-iam-credentials", - OperationVerb: "rotate", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathConfigRotateRootUpdate, @@ -66,7 +56,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R } var getUserInput iam.GetUserInput // empty input means get current user - getUserRes, err := client.GetUserWithContext(ctx, &getUserInput) + getUserRes, err := client.GetUser(&getUserInput) if err != nil { return nil, fmt.Errorf("error calling GetUser: %w", err) } @@ -83,7 +73,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R createAccessKeyInput := iam.CreateAccessKeyInput{ UserName: getUserRes.User.UserName, } - createAccessKeyRes, err := client.CreateAccessKeyWithContext(ctx, &createAccessKeyInput) + createAccessKeyRes, err := client.CreateAccessKey(&createAccessKeyInput) if err != nil { return nil, fmt.Errorf("error calling CreateAccessKey: %w", err) } @@ -114,7 +104,7 @@ func (b *backend) pathConfigRotateRootUpdate(ctx context.Context, req *logical.R AccessKeyId: aws.String(oldAccessKey), UserName: getUserRes.User.UserName, } - _, err = client.DeleteAccessKeyWithContext(ctx, &deleteAccessKeyInput) + _, err = client.DeleteAccessKey(&deleteAccessKeyInput) if err != nil { return nil, fmt.Errorf("error deleting old access key: %w", err) } diff --git a/builtin/logical/aws/path_roles.go b/builtin/logical/aws/path_roles.go index 67545e641434d0..a7c3dd84a8967c 100644 --- a/builtin/logical/aws/path_roles.go +++ b/builtin/logical/aws/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -27,11 +24,6 @@ func pathListRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "roles", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -44,18 +36,12 @@ func pathListRoles(b *backend) *framework.Path { func pathRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/" + framework.GenericNameWithAtRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationSuffix: "role", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, - Description: "Name of the role", + Description: "Name of the policy", DisplayAttrs: &framework.DisplayAttributes{ - Name: "Role Name", + Name: "Policy Name", }, }, diff --git a/builtin/logical/aws/path_roles_test.go b/builtin/logical/aws/path_roles_test.go index c5bf167866cc94..39c9d90811fc4b 100644 --- a/builtin/logical/aws/path_roles_test.go +++ b/builtin/logical/aws/path_roles_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -21,7 +18,7 @@ func TestBackend_PathListRoles(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } @@ -224,7 +221,7 @@ func TestRoleCRUDWithPermissionsBoundary(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } @@ -268,7 +265,7 @@ func TestRoleWithPermissionsBoundaryValidation(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } diff --git a/builtin/logical/aws/path_static_creds.go b/builtin/logical/aws/path_static_creds.go deleted file mode 100644 index 119f5d0b3debd3..00000000000000 --- a/builtin/logical/aws/path_static_creds.go +++ /dev/null @@ -1,99 +0,0 @@ -package aws - -import ( - "context" - "fmt" - "net/http" - - "github.com/fatih/structs" - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -const ( - pathStaticCreds = "static-creds" - - paramAccessKeyID = "access_key" - paramSecretsAccessKey = "secret_key" -) - -type awsCredentials struct { - AccessKeyID string `json:"access_key" structs:"access_key" mapstructure:"access_key"` - SecretAccessKey string `json:"secret_key" structs:"secret_key" mapstructure:"secret_key"` -} - -func pathStaticCredentials(b *backend) *framework.Path { - return &framework.Path{ - Pattern: fmt.Sprintf("%s/%s", pathStaticCreds, framework.GenericNameWithAtRegex(paramRoleName)), - Fields: map[string]*framework.FieldSchema{ - paramRoleName: { - Type: framework.TypeString, - Description: descRoleName, - }, - }, - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathStaticCredsRead, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: http.StatusText(http.StatusOK), - Fields: map[string]*framework.FieldSchema{ - paramAccessKeyID: { - Type: framework.TypeString, - Description: descAccessKeyID, - }, - paramSecretsAccessKey: { - Type: framework.TypeString, - Description: descSecretAccessKey, - }, - }, - }}, - }, - }, - }, - - HelpSynopsis: pathStaticCredsHelpSyn, - HelpDescription: pathStaticCredsHelpDesc, - } -} - -func (b *backend) pathStaticCredsRead(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { - roleName, ok := data.GetOk(paramRoleName) - if !ok { - return nil, fmt.Errorf("missing %q parameter", paramRoleName) - } - - entry, err := req.Storage.Get(ctx, formatCredsStoragePath(roleName.(string))) - if err != nil { - return nil, fmt.Errorf("failed to read credentials for role %q: %w", roleName, err) - } - if entry == nil { - return nil, nil - } - - var credentials awsCredentials - if err := entry.DecodeJSON(&credentials); err != nil { - return nil, fmt.Errorf("failed to decode credentials: %w", err) - } - - return &logical.Response{ - Data: structs.New(credentials).Map(), - }, nil -} - -func formatCredsStoragePath(roleName string) string { - return fmt.Sprintf("%s/%s", pathStaticCreds, roleName) -} - -const pathStaticCredsHelpSyn = `Retrieve static credentials from the named role.` - -const pathStaticCredsHelpDesc = ` 
-This path reads AWS credentials for a certain static role. The keys are rotated -periodically according to their configuration, and will return the same password -until they are rotated.` - -const ( - descAccessKeyID = "The access key of the AWS Credential" - descSecretAccessKey = "The secret key of the AWS Credential" -) diff --git a/builtin/logical/aws/path_static_creds_test.go b/builtin/logical/aws/path_static_creds_test.go deleted file mode 100644 index c478e3f74358cb..00000000000000 --- a/builtin/logical/aws/path_static_creds_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package aws - -import ( - "context" - "reflect" - "testing" - - "github.com/fatih/structs" - - "github.com/hashicorp/vault/sdk/framework" - - "github.com/hashicorp/vault/sdk/logical" -) - -// TestStaticCredsRead verifies that we can correctly read a cred that exists, and correctly _not read_ -// a cred that does not exist. -func TestStaticCredsRead(t *testing.T) { - // setup - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - bgCTX := context.Background() // for brevity later - - // insert a cred to get - creds := &awsCredentials{ - AccessKeyID: "foo", - SecretAccessKey: "bar", - } - entry, err := logical.StorageEntryJSON(formatCredsStoragePath("test"), creds) - if err != nil { - t.Fatal(err) - } - err = config.StorageView.Put(bgCTX, entry) - if err != nil { - t.Fatal(err) - } - - // cases - cases := []struct { - name string - roleName string - expectedError error - expectedResponse *logical.Response - }{ - { - name: "get existing creds", - roleName: "test", - expectedResponse: &logical.Response{ - Data: structs.New(creds).Map(), - }, - }, - { - name: "get non-existent creds", - roleName: "this-doesnt-exist", - // returns nil, nil - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - b := Backend(config) - - req := &logical.Request{ - Storage: config.StorageView, - Data: map[string]interface{}{ - "name": c.roleName, - }, - } - resp, err := b.pathStaticCredsRead(bgCTX, req, staticCredsFieldData(req.Data)) - - if err != c.expectedError { - t.Fatalf("got error %q, but expected %q", err, c.expectedError) - } - if !reflect.DeepEqual(resp, c.expectedResponse) { - t.Fatalf("got response %v, but expected %v", resp, c.expectedResponse) - } - }) - } -} - -func staticCredsFieldData(data map[string]interface{}) *framework.FieldData { - schema := map[string]*framework.FieldSchema{ - paramRoleName: { - Type: framework.TypeString, - Description: descRoleName, - }, - } - - return &framework.FieldData{ - Raw: data, - Schema: schema, - } -} diff --git a/builtin/logical/aws/path_static_roles.go b/builtin/logical/aws/path_static_roles.go deleted file mode 100644 index b0aa3b02c7716d..00000000000000 --- a/builtin/logical/aws/path_static_roles.go +++ /dev/null @@ -1,331 +0,0 @@ -package aws - -import ( - "context" - "errors" - "fmt" - "net/http" - "time" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/iam" - "github.com/fatih/structs" - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/sdk/queue" -) - -const ( - pathStaticRole = "static-roles" - - paramRoleName = "name" - paramUsername = "username" - paramRotationPeriod = "rotation_period" -) - -type staticRoleEntry struct { - Name string `json:"name" structs:"name" mapstructure:"name"` - ID string `json:"id" structs:"id" mapstructure:"id"` - Username string `json:"username" structs:"username" mapstructure:"username"` - RotationPeriod 
time.Duration `json:"rotation_period" structs:"rotation_period" mapstructure:"rotation_period"` -} - -func pathStaticRoles(b *backend) *framework.Path { - roleResponse := map[int][]framework.Response{ - http.StatusOK: {{ - Description: http.StatusText(http.StatusOK), - Fields: map[string]*framework.FieldSchema{ - paramRoleName: { - Type: framework.TypeString, - Description: descRoleName, - }, - paramUsername: { - Type: framework.TypeString, - Description: descUsername, - }, - paramRotationPeriod: { - Type: framework.TypeDurationSecond, - Description: descRotationPeriod, - }, - }, - }}, - } - - return &framework.Path{ - Pattern: fmt.Sprintf("%s/%s", pathStaticRole, framework.GenericNameWithAtRegex(paramRoleName)), - Fields: map[string]*framework.FieldSchema{ - paramRoleName: { - Type: framework.TypeString, - Description: descRoleName, - }, - paramUsername: { - Type: framework.TypeString, - Description: descUsername, - }, - paramRotationPeriod: { - Type: framework.TypeDurationSecond, - Description: descRotationPeriod, - }, - }, - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathStaticRolesRead, - Responses: roleResponse, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathStaticRolesWrite, - ForwardPerformanceSecondary: true, - ForwardPerformanceStandby: true, - Responses: roleResponse, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathStaticRolesDelete, - ForwardPerformanceSecondary: true, - ForwardPerformanceStandby: true, - Responses: map[int][]framework.Response{ - http.StatusNoContent: {{ - Description: http.StatusText(http.StatusNoContent), - }}, - }, - }, - }, - - HelpSynopsis: pathStaticRolesHelpSyn, - HelpDescription: pathStaticRolesHelpDesc, - } -} - -func (b *backend) pathStaticRolesRead(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { - roleName, ok := data.GetOk(paramRoleName) - if !ok { - return nil, fmt.Errorf("missing %q parameter", paramRoleName) - } - - b.roleMutex.RLock() - defer b.roleMutex.RUnlock() - - entry, err := req.Storage.Get(ctx, formatRoleStoragePath(roleName.(string))) - if err != nil { - return nil, fmt.Errorf("failed to read configuration for static role %q: %w", roleName, err) - } - if entry == nil { - return nil, nil - } - - var config staticRoleEntry - if err := entry.DecodeJSON(&config); err != nil { - return nil, fmt.Errorf("failed to decode configuration for static role %q: %w", roleName, err) - } - - return &logical.Response{ - Data: formatResponse(config), - }, nil -} - -func (b *backend) pathStaticRolesWrite(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { - // Create & validate config from request parameters - config := staticRoleEntry{} - isCreate := req.Operation == logical.CreateOperation - - if rawRoleName, ok := data.GetOk(paramRoleName); ok { - config.Name = rawRoleName.(string) - - if err := b.validateRoleName(config.Name); err != nil { - return nil, err - } - } else { - return logical.ErrorResponse("missing %q parameter", paramRoleName), nil - } - - // retrieve old role value - entry, err := req.Storage.Get(ctx, formatRoleStoragePath(config.Name)) - if err != nil { - return nil, fmt.Errorf("couldn't check storage for pre-existing role: %w", err) - } - - if entry != nil { - err = entry.DecodeJSON(&config) - if err != nil { - return nil, fmt.Errorf("couldn't convert existing role into config struct: %w", err) - } 
- } else { - // if we couldn't find an entry, this is a create event - isCreate = true - } - - // other params are optional if we're not Creating - - if rawUsername, ok := data.GetOk(paramUsername); ok { - config.Username = rawUsername.(string) - - if err := b.validateIAMUserExists(ctx, req.Storage, &config, isCreate); err != nil { - return nil, err - } - } else if isCreate { - return logical.ErrorResponse("missing %q parameter", paramUsername), nil - } - - if rawRotationPeriod, ok := data.GetOk(paramRotationPeriod); ok { - config.RotationPeriod = time.Duration(rawRotationPeriod.(int)) * time.Second - - if err := b.validateRotationPeriod(config.RotationPeriod); err != nil { - return nil, err - } - } else if isCreate { - return logical.ErrorResponse("missing %q parameter", paramRotationPeriod), nil - } - - b.roleMutex.Lock() - defer b.roleMutex.Unlock() - - // Upsert role config - newRole, err := logical.StorageEntryJSON(formatRoleStoragePath(config.Name), config) - if err != nil { - return nil, fmt.Errorf("failed to marshal object to JSON: %w", err) - } - err = req.Storage.Put(ctx, newRole) - if err != nil { - return nil, fmt.Errorf("failed to save object in storage: %w", err) - } - - // Bootstrap initial set of keys if they did not exist before. AWS Secret Access Keys can only be obtained on creation, - // so we need to boostrap new roles with a new initial set of keys to be able to serve valid credentials to Vault clients. - existingCreds, err := req.Storage.Get(ctx, formatCredsStoragePath(config.Name)) - if err != nil { - return nil, fmt.Errorf("unable to verify if credentials already exist for role %q: %w", config.Name, err) - } - if existingCreds == nil { - err := b.createCredential(ctx, req.Storage, config, false) - if err != nil { - return nil, fmt.Errorf("failed to create new credentials for role %q: %w", config.Name, err) - } - - err = b.credRotationQueue.Push(&queue.Item{ - Key: config.Name, - Value: config, - Priority: time.Now().Add(config.RotationPeriod).Unix(), - }) - if err != nil { - return nil, fmt.Errorf("failed to add item into the rotation queue for role %q: %w", config.Name, err) - } - } - - return &logical.Response{ - Data: formatResponse(config), - }, nil -} - -func (b *backend) pathStaticRolesDelete(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) { - roleName, ok := data.GetOk(paramRoleName) - if !ok { - return nil, fmt.Errorf("missing %q parameter", paramRoleName) - } - - b.roleMutex.Lock() - defer b.roleMutex.Unlock() - - entry, err := req.Storage.Get(ctx, formatRoleStoragePath(roleName.(string))) - if err != nil { - return nil, fmt.Errorf("couldn't locate role in storage due to error: %w", err) - } - // no entry in storage, but no error either, congrats, it's deleted! 
- if entry == nil { - return nil, nil - } - var cfg staticRoleEntry - err = entry.DecodeJSON(&cfg) - if err != nil { - return nil, fmt.Errorf("couldn't convert storage entry to role config") - } - - err = b.deleteCredential(ctx, req.Storage, cfg, false) - if err != nil { - return nil, fmt.Errorf("failed to clean credentials while deleting role %q: %w", roleName.(string), err) - } - - // delete from the queue - _, err = b.credRotationQueue.PopByKey(cfg.Name) - if err != nil { - return nil, fmt.Errorf("couldn't delete key from queue: %w", err) - } - - return nil, req.Storage.Delete(ctx, formatRoleStoragePath(roleName.(string))) -} - -func (b *backend) validateRoleName(name string) error { - if name == "" { - return errors.New("empty role name attribute given") - } - return nil -} - -// validateIAMUser checks the user information we have for the role against the information on AWS. On a create, it uses the username -// to retrieve the user information and _sets_ the userID. On update, it validates the userID and username. -func (b *backend) validateIAMUserExists(ctx context.Context, storage logical.Storage, entry *staticRoleEntry, isCreate bool) error { - c, err := b.clientIAM(ctx, storage) - if err != nil { - return fmt.Errorf("unable to validate username %q: %w", entry.Username, err) - } - - // we don't really care about the content of the result, just that it's not an error - out, err := c.GetUser(&iam.GetUserInput{ - UserName: aws.String(entry.Username), - }) - if err != nil || out.User == nil { - return fmt.Errorf("unable to validate username %q: %w", entry.Username, err) - } - if *out.User.UserName != entry.Username { - return fmt.Errorf("AWS GetUser returned a username, but it didn't match: %q was requested, but %q was returned", entry.Username, *out.User.UserName) - } - - if !isCreate && *out.User.UserId != entry.ID { - return fmt.Errorf("AWS GetUser returned a user, but the ID did not match: %q was requested, but %q was returned", entry.ID, *out.User.UserId) - } else { - // if this is an insert, store the userID. This is the immutable part of an IAM user, but it's not exactly user-friendly. - // So, we allow users to specify usernames, but on updates we'll use the ID as a verification cross-check. - entry.ID = *out.User.UserId - } - - return nil -} - -const ( - minAllowableRotationPeriod = 1 * time.Minute -) - -func (b *backend) validateRotationPeriod(period time.Duration) error { - if period < minAllowableRotationPeriod { - return fmt.Errorf("role rotation period out of range: must be greater than %.2f seconds", minAllowableRotationPeriod.Seconds()) - } - return nil -} - -func formatResponse(cfg staticRoleEntry) map[string]interface{} { - response := structs.New(cfg).Map() - response[paramRotationPeriod] = int64(cfg.RotationPeriod.Seconds()) - - return response -} - -func formatRoleStoragePath(roleName string) string { - return fmt.Sprintf("%s/%s", pathStaticRole, roleName) -} - -const pathStaticRolesHelpSyn = ` -Manage static roles for AWS. -` - -const pathStaticRolesHelpDesc = ` -This path lets you manage static roles (users) for the AWS secret backend. -A static role is associated with a single IAM user, and manages the access -keys based on a rotation period, automatically rotating the credential. If -the IAM user has multiple access keys, the oldest key will be rotated. -` - -const ( - descRoleName = "The name of this role." - descUsername = "The IAM user to adopt as a static role." - descRotationPeriod = `Period by which to rotate the backing credential of the adopted user. 
-This can be a Go duration (e.g, '1m', 24h'), or an integer number of seconds.` -) diff --git a/builtin/logical/aws/path_static_roles_test.go b/builtin/logical/aws/path_static_roles_test.go deleted file mode 100644 index 205b42cd00978d..00000000000000 --- a/builtin/logical/aws/path_static_roles_test.go +++ /dev/null @@ -1,490 +0,0 @@ -package aws - -import ( - "context" - "errors" - "testing" - "time" - - "github.com/hashicorp/vault/sdk/queue" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/iam" - "github.com/hashicorp/go-secure-stdlib/awsutil" - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -// TestStaticRolesValidation verifies that valid requests pass validation and that invalid requests fail validation. -// This includes the user already existing in IAM roles, and the rotation period being sufficiently long. -func TestStaticRolesValidation(t *testing.T) { - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - bgCTX := context.Background() // for brevity - - cases := []struct { - name string - opts []awsutil.MockIAMOption - requestData map[string]interface{} - isError bool - }{ - { - name: "all good", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserOutput(&iam.GetUserOutput{User: &iam.User{UserName: aws.String("jane-doe"), UserId: aws.String("unique-id")}}), - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String("abcdefghijklmnopqrstuvwxyz"), - SecretAccessKey: aws.String("zyxwvutsrqponmlkjihgfedcba"), - UserName: aws.String("jane-doe"), - }, - }), - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{}, - IsTruncated: aws.Bool(false), - }), - }, - requestData: map[string]interface{}{ - "name": "test", - "username": "jane-doe", - "rotation_period": "1d", - }, - }, - { - name: "bad user", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserError(errors.New("oh no")), - }, - requestData: map[string]interface{}{ - "name": "test", - "username": "jane-doe", - "rotation_period": "24h", - }, - isError: true, - }, - { - name: "user mismatch", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserOutput(&iam.GetUserOutput{User: &iam.User{UserName: aws.String("ms-impostor"), UserId: aws.String("fake-id")}}), - }, - requestData: map[string]interface{}{ - "name": "test", - "username": "jane-doe", - "rotation_period": "1d2h", - }, - isError: true, - }, - { - name: "bad rotation period", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserOutput(&iam.GetUserOutput{User: &iam.User{UserName: aws.String("jane-doe"), UserId: aws.String("unique-id")}}), - }, - requestData: map[string]interface{}{ - "name": "test", - "username": "jane-doe", - "rotation_period": "45s", - }, - isError: true, - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - b := Backend(config) - miam, err := awsutil.NewMockIAM(c.opts...)(nil) - if err != nil { - t.Fatal(err) - } - b.iamClient = miam - if err := b.Setup(bgCTX, config); err != nil { - t.Fatal(err) - } - req := &logical.Request{ - Operation: logical.UpdateOperation, - Storage: config.StorageView, - Data: c.requestData, - Path: "static-roles/test", - } - _, err = b.pathStaticRolesWrite(bgCTX, req, staticRoleFieldData(req.Data)) - if c.isError && err == nil { - t.Fatal("expected an error but didn't get one") - } else if !c.isError && err != nil { - t.Fatalf("got an unexpected error: %s", err) - } - }) - } -} - -// 
TestStaticRolesWrite validates that we can write a new entry for a new static role, and that we correctly -// do not write if the request is invalid in some way. -func TestStaticRolesWrite(t *testing.T) { - bgCTX := context.Background() - - cases := []struct { - name string - opts []awsutil.MockIAMOption - data map[string]interface{} - expectedError bool - findUser bool - isUpdate bool - }{ - { - name: "happy path", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserOutput(&iam.GetUserOutput{User: &iam.User{UserName: aws.String("jane-doe"), UserId: aws.String("unique-id")}}), - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{}, - IsTruncated: aws.Bool(false), - }), - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String("abcdefghijklmnopqrstuvwxyz"), - SecretAccessKey: aws.String("zyxwvutsrqponmlkjihgfedcba"), - UserName: aws.String("jane-doe"), - }, - }), - }, - data: map[string]interface{}{ - "name": "test", - "username": "jane-doe", - "rotation_period": "1d", - }, - // writes role, writes cred - findUser: true, - }, - { - name: "no aws user", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserError(errors.New("no such user, etc etc")), - }, - data: map[string]interface{}{ - "name": "test", - "username": "a-nony-mous", - "rotation_period": "15s", - }, - expectedError: true, - }, - { - name: "update existing user", - opts: []awsutil.MockIAMOption{ - awsutil.WithGetUserOutput(&iam.GetUserOutput{User: &iam.User{UserName: aws.String("john-doe"), UserId: aws.String("unique-id")}}), - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{}, - IsTruncated: aws.Bool(false), - }), - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String("abcdefghijklmnopqrstuvwxyz"), - SecretAccessKey: aws.String("zyxwvutsrqponmlkjihgfedcba"), - UserName: aws.String("john-doe"), - }, - }), - }, - data: map[string]interface{}{ - "name": "johnny", - "rotation_period": "19m", - }, - findUser: true, - isUpdate: true, - }, - } - - // if a user exists (user doesn't exist is tested in validation) - // we'll check how many keys the user has - if it's two, we delete one. 
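For readers skimming the deleted tests, the key-pruning behaviour referred to just above is easier to see in isolation. Below is a minimal sketch of that step using the same aws-sdk-go types; the helper name and iamiface-based signature are ours, not the backend's, and the real logic lives in createCredential further down in this patch.

package awsexample

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/iam"
	"github.com/aws/aws-sdk-go/service/iam/iamiface"
)

// deleteOldestKey frees up room for a new access key when the per-user
// limit of two keys is already reached: it finds the key with the earliest
// CreateDate and deletes it.
func deleteOldestKey(client iamiface.IAMAPI, username string, keys []*iam.AccessKeyMetadata) error {
	if len(keys) < 2 {
		return nil // still room for another key, nothing to prune
	}
	oldest := keys[0]
	for _, k := range keys[1:] {
		if k.CreateDate.Before(*oldest.CreateDate) {
			oldest = k
		}
	}
	if _, err := client.DeleteAccessKey(&iam.DeleteAccessKeyInput{
		AccessKeyId: oldest.AccessKeyId,
		UserName:    aws.String(username),
	}); err != nil {
		return fmt.Errorf("unable to delete oldest access key for %q: %w", username, err)
	}
	return nil
}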
- - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - - miam, err := awsutil.NewMockIAM( - c.opts..., - )(nil) - if err != nil { - t.Fatal(err) - } - - b := Backend(config) - b.iamClient = miam - if err := b.Setup(bgCTX, config); err != nil { - t.Fatal(err) - } - - // put a role in storage for update tests - staticRole := staticRoleEntry{ - Name: "johnny", - Username: "john-doe", - ID: "unique-id", - RotationPeriod: 24 * time.Hour, - } - entry, err := logical.StorageEntryJSON(formatRoleStoragePath(staticRole.Name), staticRole) - if err != nil { - t.Fatal(err) - } - err = config.StorageView.Put(bgCTX, entry) - if err != nil { - t.Fatal(err) - } - - req := &logical.Request{ - Operation: logical.UpdateOperation, - Storage: config.StorageView, - Data: c.data, - Path: "static-roles/" + c.data["name"].(string), - } - - r, err := b.pathStaticRolesWrite(bgCTX, req, staticRoleFieldData(req.Data)) - if c.expectedError && err == nil { - t.Fatal(err) - } else if c.expectedError { - return // save us some if statements - } - - if err != nil { - t.Fatalf("got an error back unexpectedly: %s", err) - } - - if c.findUser && r == nil { - t.Fatal("response was nil, but it shouldn't have been") - } - - role, err := config.StorageView.Get(bgCTX, req.Path) - if c.findUser && (err != nil || role == nil) { - t.Fatalf("couldn't find the role we should have stored: %s", err) - } - var actualData staticRoleEntry - err = role.DecodeJSON(&actualData) - if err != nil { - t.Fatalf("couldn't convert storage data to role entry: %s", err) - } - - // construct expected data - var expectedData staticRoleEntry - fieldData := staticRoleFieldData(c.data) - if c.isUpdate { - // data is johnny + c.data - expectedData = staticRole - } - - if u, ok := fieldData.GetOk("username"); ok { - expectedData.Username = u.(string) - } - if r, ok := fieldData.GetOk("rotation_period"); ok { - expectedData.RotationPeriod = time.Duration(r.(int)) * time.Second - } - if n, ok := fieldData.GetOk("name"); ok { - expectedData.Name = n.(string) - } - - // validate fields - if eu, au := expectedData.Username, actualData.Username; eu != au { - t.Fatalf("mismatched username, expected %q but got %q", eu, au) - } - if er, ar := expectedData.RotationPeriod, actualData.RotationPeriod; er != ar { - t.Fatalf("mismatched rotation period, expected %q but got %q", er, ar) - } - if en, an := expectedData.Name, actualData.Name; en != an { - t.Fatalf("mismatched role name, expected %q, but got %q", en, an) - } - }) - } -} - -// TestStaticRoleRead validates that we can read a configured role and correctly do not read anything if we -// request something that doesn't exist. 
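For orientation before the read test, this is roughly how a client exercises the same static-role paths from outside. It is a sketch that assumes the AWS secrets engine is mounted at "aws/" and that VAULT_ADDR and VAULT_TOKEN are set for the default client config; the role and user names are the placeholders used by these tests.

package main

import (
	"fmt"
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Create or update a static role. rotation_period accepts a duration
	// string such as "24h" or "1d", or an integer number of seconds, and
	// must be at least one minute.
	if _, err := client.Logical().Write("aws/static-roles/test", map[string]interface{}{
		"username":        "jane-doe",
		"rotation_period": "24h",
	}); err != nil {
		log.Fatal(err)
	}

	// Read it back; a role that does not exist yields a nil secret, which
	// is the behaviour the "role name not found" case below checks.
	secret, err := client.Logical().Read("aws/static-roles/test")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(secret.Data["rotation_period"])
}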
-func TestStaticRoleRead(t *testing.T) { - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - bgCTX := context.Background() - - // test cases are run against an inmem storage holding a role called "test" attached to an IAM user called "jane-doe" - cases := []struct { - name string - roleName string - found bool - }{ - { - name: "role name exists", - roleName: "test", - found: true, - }, - { - name: "role name not found", - roleName: "toast", - found: false, // implied, but set for clarity - }, - } - - staticRole := staticRoleEntry{ - Name: "test", - Username: "jane-doe", - RotationPeriod: 24 * time.Hour, - } - entry, err := logical.StorageEntryJSON(formatRoleStoragePath(staticRole.Name), staticRole) - if err != nil { - t.Fatal(err) - } - err = config.StorageView.Put(bgCTX, entry) - if err != nil { - t.Fatal(err) - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - req := &logical.Request{ - Operation: logical.ReadOperation, - Storage: config.StorageView, - Data: map[string]interface{}{ - "name": c.roleName, - }, - Path: formatRoleStoragePath(c.roleName), - } - - b := Backend(config) - - r, err := b.pathStaticRolesRead(bgCTX, req, staticRoleFieldData(req.Data)) - if err != nil { - t.Fatal(err) - } - if c.found { - if r == nil { - t.Fatal("response was nil, but it shouldn't have been") - } - } else { - if r != nil { - t.Fatal("response should have been nil on a non-existent role") - } - } - }) - } -} - -// TestStaticRoleDelete validates that we correctly remove a role on a delete request, and that we correctly do not -// remove anything if a role does not exist with that name. -func TestStaticRoleDelete(t *testing.T) { - bgCTX := context.Background() - - // test cases are run against an inmem storage holding a role called "test" attached to an IAM user called "jane-doe" - cases := []struct { - name string - role string - found bool - }{ - { - name: "role found", - role: "test", - found: true, - }, - { - name: "role not found", - role: "tossed", - found: false, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - - // fake an IAM - var iamfunc awsutil.IAMAPIFunc - if !c.found { - iamfunc = awsutil.NewMockIAM(awsutil.WithDeleteAccessKeyError(errors.New("shouldn't have called delete"))) - } else { - iamfunc = awsutil.NewMockIAM() - } - miam, err := iamfunc(nil) - if err != nil { - t.Fatalf("couldn't initialize mockiam: %s", err) - } - - b := Backend(config) - b.iamClient = miam - - // put in storage - staticRole := staticRoleEntry{ - Name: "test", - Username: "jane-doe", - RotationPeriod: 24 * time.Hour, - } - entry, err := logical.StorageEntryJSON(formatRoleStoragePath(staticRole.Name), staticRole) - if err != nil { - t.Fatal(err) - } - err = config.StorageView.Put(bgCTX, entry) - if err != nil { - t.Fatal(err) - } - - l, err := config.StorageView.List(bgCTX, "") - if err != nil || len(l) != 1 { - t.Fatalf("couldn't add an entry to storage during test setup: %s", err) - } - - // put in queue - err = b.credRotationQueue.Push(&queue.Item{ - Key: staticRole.Name, - Value: staticRole, - Priority: time.Now().Add(90 * time.Hour).Unix(), - }) - if err != nil { - t.Fatalf("couldn't add items to pq") - } - - req := &logical.Request{ - Operation: logical.ReadOperation, - Storage: config.StorageView, - Data: map[string]interface{}{ - "name": c.role, - }, - Path: formatRoleStoragePath(c.role), - } - - r, err := b.pathStaticRolesDelete(bgCTX, 
req, staticRoleFieldData(req.Data)) - if err != nil { - t.Fatal(err) - } - if r != nil { - t.Fatal("response wasn't nil, but it should have been") - } - - l, err = config.StorageView.List(bgCTX, "") - if err != nil { - t.Fatal(err) - } - if c.found && len(l) != 0 { - t.Fatal("size of role storage is non zero after delete") - } else if !c.found && len(l) != 1 { - t.Fatal("size of role storage changed after what should have been no deletion") - } - - if c.found && b.credRotationQueue.Len() != 0 { - t.Fatal("size of queue is non-zero after delete") - } else if !c.found && b.credRotationQueue.Len() != 1 { - t.Fatal("size of queue changed after what should have been no deletion") - } - }) - } -} - -func staticRoleFieldData(data map[string]interface{}) *framework.FieldData { - schema := map[string]*framework.FieldSchema{ - paramRoleName: { - Type: framework.TypeString, - Description: descRoleName, - }, - paramUsername: { - Type: framework.TypeString, - Description: descUsername, - }, - paramRotationPeriod: { - Type: framework.TypeDurationSecond, - Description: descRotationPeriod, - }, - } - - return &framework.FieldData{ - Raw: data, - Schema: schema, - } -} diff --git a/builtin/logical/aws/path_user.go b/builtin/logical/aws/path_user.go index f368365c60247e..035350cdbfb1ff 100644 --- a/builtin/logical/aws/path_user.go +++ b/builtin/logical/aws/path_user.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -21,12 +18,6 @@ import ( func pathUser(b *backend) *framework.Path { return &framework.Path{ Pattern: "(creds|sts)/" + framework.GenericNameWithAtRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixAWS, - OperationVerb: "generate", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -47,19 +38,9 @@ func pathUser(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathCredsRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "credentials|sts-credentials", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathCredsRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "credentials-with-parameters|sts-credentials-with-parameters", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathCredsRead, + logical.UpdateOperation: b.pathCredsRead, }, HelpSynopsis: pathUserHelpSyn, @@ -171,7 +152,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k } // Get information about this user - groupsResp, err := client.ListGroupsForUserWithContext(ctx, &iam.ListGroupsForUserInput{ + groupsResp, err := client.ListGroupsForUser(&iam.ListGroupsForUserInput{ UserName: aws.String(username), MaxItems: aws.Int64(1000), }) @@ -210,7 +191,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k groups := groupsResp.Groups // Inline (user) policies - policiesResp, err := client.ListUserPoliciesWithContext(ctx, &iam.ListUserPoliciesInput{ + policiesResp, err := client.ListUserPolicies(&iam.ListUserPoliciesInput{ UserName: aws.String(username), MaxItems: aws.Int64(1000), }) @@ -220,7 +201,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k policies := policiesResp.PolicyNames // Attached managed policies - manPoliciesResp, err := 
client.ListAttachedUserPoliciesWithContext(ctx, &iam.ListAttachedUserPoliciesInput{ + manPoliciesResp, err := client.ListAttachedUserPolicies(&iam.ListAttachedUserPoliciesInput{ UserName: aws.String(username), MaxItems: aws.Int64(1000), }) @@ -229,7 +210,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k } manPolicies := manPoliciesResp.AttachedPolicies - keysResp, err := client.ListAccessKeysWithContext(ctx, &iam.ListAccessKeysInput{ + keysResp, err := client.ListAccessKeys(&iam.ListAccessKeysInput{ UserName: aws.String(username), MaxItems: aws.Int64(1000), }) @@ -240,7 +221,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k // Revoke all keys for _, k := range keys { - _, err = client.DeleteAccessKeyWithContext(ctx, &iam.DeleteAccessKeyInput{ + _, err = client.DeleteAccessKey(&iam.DeleteAccessKeyInput{ AccessKeyId: k.AccessKeyId, UserName: aws.String(username), }) @@ -251,7 +232,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k // Detach managed policies for _, p := range manPolicies { - _, err = client.DetachUserPolicyWithContext(ctx, &iam.DetachUserPolicyInput{ + _, err = client.DetachUserPolicy(&iam.DetachUserPolicyInput{ UserName: aws.String(username), PolicyArn: p.PolicyArn, }) @@ -262,7 +243,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k // Delete any inline (user) policies for _, p := range policies { - _, err = client.DeleteUserPolicyWithContext(ctx, &iam.DeleteUserPolicyInput{ + _, err = client.DeleteUserPolicy(&iam.DeleteUserPolicyInput{ UserName: aws.String(username), PolicyName: p, }) @@ -273,7 +254,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k // Remove the user from all their groups for _, g := range groups { - _, err = client.RemoveUserFromGroupWithContext(ctx, &iam.RemoveUserFromGroupInput{ + _, err = client.RemoveUserFromGroup(&iam.RemoveUserFromGroupInput{ GroupName: g.GroupName, UserName: aws.String(username), }) @@ -283,7 +264,7 @@ func (b *backend) pathUserRollback(ctx context.Context, req *logical.Request, _k } // Delete the user - _, err = client.DeleteUserWithContext(ctx, &iam.DeleteUserInput{ + _, err = client.DeleteUser(&iam.DeleteUserInput{ UserName: aws.String(username), }) if err != nil { diff --git a/builtin/logical/aws/rollback.go b/builtin/logical/aws/rollback.go index 847ecd1c258b79..e498fc6b2bafd7 100644 --- a/builtin/logical/aws/rollback.go +++ b/builtin/logical/aws/rollback.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( diff --git a/builtin/logical/aws/rotation.go b/builtin/logical/aws/rotation.go deleted file mode 100644 index 44614376246567..00000000000000 --- a/builtin/logical/aws/rotation.go +++ /dev/null @@ -1,188 +0,0 @@ -package aws - -import ( - "context" - "fmt" - "time" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/iam" - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/sdk/queue" -) - -// rotateExpiredStaticCreds will pop expired credentials (credentials whose priority -// represents a time before the present), rotate the associated credential, and push -// them back onto the queue with the new priority. 
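The priority mentioned here is simply the Unix timestamp at which the credential next comes due, so "expired" means an item whose priority lies in the past. A small sketch of that convention using the same sdk/queue package (the helper names are ours, not the backend's):

package awsexample

import (
	"time"

	"github.com/hashicorp/vault/sdk/queue"
)

// scheduleRotation queues a role so it comes due one rotation period from now.
func scheduleRotation(q *queue.PriorityQueue, roleName string, rotationPeriod time.Duration) error {
	return q.Push(&queue.Item{
		Key:      roleName,
		Priority: time.Now().Add(rotationPeriod).Unix(),
	})
}

// isDue reports whether a popped item should be rotated right away.
func isDue(item *queue.Item) bool {
	return item.Priority <= time.Now().Unix()
}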
-func (b *backend) rotateExpiredStaticCreds(ctx context.Context, req *logical.Request) error { - var errs *multierror.Error - - for { - keepGoing, err := b.rotateCredential(ctx, req.Storage) - if err != nil { - errs = multierror.Append(errs, err) - } - if !keepGoing { - if errs.ErrorOrNil() != nil { - return fmt.Errorf("error(s) occurred while rotating expired static credentials: %w", errs) - } else { - return nil - } - } - } -} - -// rotateCredential pops an element from the priority queue, and if it is expired, rotate and re-push. -// If a cred was rotated, it returns true, otherwise false. -func (b *backend) rotateCredential(ctx context.Context, storage logical.Storage) (rotated bool, err error) { - // If queue is empty or first item does not need a rotation (priority is next rotation timestamp) there is nothing to do - item, err := b.credRotationQueue.Pop() - if err != nil { - // the queue is just empty, which is fine. - if err == queue.ErrEmpty { - return false, nil - } - return false, fmt.Errorf("failed to pop from queue for role %q: %w", item.Key, err) - } - if item.Priority > time.Now().Unix() { - // no rotation required - // push the item back into priority queue - err = b.credRotationQueue.Push(item) - if err != nil { - return false, fmt.Errorf("failed to add item into the rotation queue for role %q: %w", item.Key, err) - } - return false, nil - } - - cfg := item.Value.(staticRoleEntry) - - err = b.createCredential(ctx, storage, cfg, true) - if err != nil { - return false, err - } - - // set new priority and re-queue - item.Priority = time.Now().Add(cfg.RotationPeriod).Unix() - err = b.credRotationQueue.Push(item) - if err != nil { - return false, fmt.Errorf("failed to add item into the rotation queue for role %q: %w", cfg.Name, err) - } - - return true, nil -} - -// createCredential will create a new iam credential, deleting the oldest one if necessary. -func (b *backend) createCredential(ctx context.Context, storage logical.Storage, cfg staticRoleEntry, shouldLockStorage bool) error { - iamClient, err := b.clientIAM(ctx, storage) - if err != nil { - return fmt.Errorf("unable to get the AWS IAM client: %w", err) - } - - // IAM users can have a most 2 sets of keys at a time. - // (https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-quotas.html) - // Ideally we would get this value through an api check, but I'm not sure one exists. - const maxAllowedKeys = 2 - - err = b.validateIAMUserExists(ctx, storage, &cfg, false) - if err != nil { - return fmt.Errorf("iam user didn't exist, or username/userid didn't match: %w", err) - } - - accessKeys, err := iamClient.ListAccessKeys(&iam.ListAccessKeysInput{ - UserName: aws.String(cfg.Username), - }) - if err != nil { - return fmt.Errorf("unable to list existing access keys for IAM user %q: %w", cfg.Username, err) - } - - // If we have the maximum number of keys, we have to delete one to make another (so we can get the credentials). - // We'll delete the oldest one. - // - // Since this check relies on a pre-coded maximum, it's a bit fragile. If the number goes up, we risk deleting - // a key when we didn't need to. If this number goes down, we'll start throwing errors because we think we're - // allowed to create a key and aren't. In either case, adjusting the constant should be sufficient to fix things. 
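On the "ideally we would get this value through an api check" point: IAM's GetAccountSummary call does report an "AccessKeysPerUserQuota" entry, so a lookup could look roughly like the sketch below. This is purely illustrative and not what the backend does (it keeps the hard-coded constant), and the summary key name should be verified against the IAM documentation before relying on it.

package awsexample

import (
	"github.com/aws/aws-sdk-go/service/iam"
	"github.com/aws/aws-sdk-go/service/iam/iamiface"
)

// maxAccessKeysPerUser asks IAM for the account's per-user access key quota,
// falling back to the documented default of 2 if the lookup fails.
func maxAccessKeysPerUser(client iamiface.IAMAPI) int64 {
	const fallback = 2
	summary, err := client.GetAccountSummary(&iam.GetAccountSummaryInput{})
	if err != nil {
		return fallback
	}
	if quota, ok := summary.SummaryMap["AccessKeysPerUserQuota"]; ok && quota != nil {
		return *quota
	}
	return fallback
}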
- if len(accessKeys.AccessKeyMetadata) >= maxAllowedKeys { - oldestKey := accessKeys.AccessKeyMetadata[0] - - for i := 1; i < len(accessKeys.AccessKeyMetadata); i++ { - if accessKeys.AccessKeyMetadata[i].CreateDate.Before(*oldestKey.CreateDate) { - oldestKey = accessKeys.AccessKeyMetadata[i] - } - } - - _, err := iamClient.DeleteAccessKey(&iam.DeleteAccessKeyInput{ - AccessKeyId: oldestKey.AccessKeyId, - UserName: oldestKey.UserName, - }) - if err != nil { - return fmt.Errorf("unable to delete oldest access keys for user %q: %w", cfg.Username, err) - } - } - - // Create new set of keys - out, err := iamClient.CreateAccessKey(&iam.CreateAccessKeyInput{ - UserName: aws.String(cfg.Username), - }) - if err != nil { - return fmt.Errorf("unable to create new access keys for user %q: %w", cfg.Username, err) - } - - // Persist new keys - entry, err := logical.StorageEntryJSON(formatCredsStoragePath(cfg.Name), &awsCredentials{ - AccessKeyID: *out.AccessKey.AccessKeyId, - SecretAccessKey: *out.AccessKey.SecretAccessKey, - }) - if err != nil { - return fmt.Errorf("failed to marshal object to JSON: %w", err) - } - if shouldLockStorage { - b.roleMutex.Lock() - defer b.roleMutex.Unlock() - } - err = storage.Put(ctx, entry) - if err != nil { - return fmt.Errorf("failed to save object in storage: %w", err) - } - - return nil -} - -// delete credential will remove the credential associated with the role from storage. -func (b *backend) deleteCredential(ctx context.Context, storage logical.Storage, cfg staticRoleEntry, shouldLockStorage bool) error { - // synchronize storage access if we didn't in the caller. - if shouldLockStorage { - b.roleMutex.Lock() - defer b.roleMutex.Unlock() - } - - key, err := storage.Get(ctx, formatCredsStoragePath(cfg.Name)) - if err != nil { - return fmt.Errorf("couldn't find key in storage: %w", err) - } - // no entry, so i guess we deleted it already - if key == nil { - return nil - } - var creds awsCredentials - err = key.DecodeJSON(&creds) - if err != nil { - return fmt.Errorf("couldn't decode storage entry to a valid credential: %w", err) - } - - err = storage.Delete(ctx, formatCredsStoragePath(cfg.Name)) - if err != nil { - return fmt.Errorf("couldn't delete from storage: %w", err) - } - - // because we have the information, this is the one we created, so it's safe for us to delete. - _, err = b.iamClient.DeleteAccessKey(&iam.DeleteAccessKeyInput{ - AccessKeyId: aws.String(creds.AccessKeyID), - UserName: aws.String(cfg.Username), - }) - if err != nil { - return fmt.Errorf("couldn't delete from IAM: %w", err) - } - - return nil -} diff --git a/builtin/logical/aws/rotation_test.go b/builtin/logical/aws/rotation_test.go deleted file mode 100644 index 8f672efc69196c..00000000000000 --- a/builtin/logical/aws/rotation_test.go +++ /dev/null @@ -1,348 +0,0 @@ -package aws - -import ( - "context" - "errors" - "testing" - "time" - - "github.com/aws/aws-sdk-go/service/iam/iamiface" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/iam" - "github.com/hashicorp/go-secure-stdlib/awsutil" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/sdk/queue" -) - -// TestRotation verifies that the rotation code and priority queue correctly selects and rotates credentials -// for static secrets. 
-func TestRotation(t *testing.T) { - bgCTX := context.Background() - - type credToInsert struct { - config staticRoleEntry // role configuration from a normal createRole request - age time.Duration // how old the cred should be - if this is longer than the config.RotationPeriod, - // the cred is 'pre-expired' - - changed bool // whether we expect the cred to change - this is technically redundant to a comparison between - // rotationPeriod and age. - } - - // due to a limitation with the mockIAM implementation, any cred you want to rotate must have - // username jane-doe and userid unique-id, since we can only pre-can one exact response to GetUser - cases := []struct { - name string - creds []credToInsert - }{ - { - name: "refresh one", - creds: []credToInsert{ - { - config: staticRoleEntry{ - Name: "test", - Username: "jane-doe", - ID: "unique-id", - RotationPeriod: 2 * time.Second, - }, - age: 5 * time.Second, - changed: true, - }, - }, - }, - { - name: "refresh none", - creds: []credToInsert{ - { - config: staticRoleEntry{ - Name: "test", - Username: "jane-doe", - ID: "unique-id", - RotationPeriod: 1 * time.Minute, - }, - age: 5 * time.Second, - changed: false, - }, - }, - }, - { - name: "refresh one of two", - creds: []credToInsert{ - { - config: staticRoleEntry{ - Name: "toast", - Username: "john-doe", - ID: "other-id", - RotationPeriod: 1 * time.Minute, - }, - age: 5 * time.Second, - changed: false, - }, - { - config: staticRoleEntry{ - Name: "test", - Username: "jane-doe", - ID: "unique-id", - RotationPeriod: 1 * time.Second, - }, - age: 5 * time.Second, - changed: true, - }, - }, - }, - { - name: "no creds to rotate", - creds: []credToInsert{}, - }, - } - - ak := "long-access-key-id" - oldSecret := "abcdefghijklmnopqrstuvwxyz" - newSecret := "zyxwvutsrqponmlkjihgfedcba" - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - - b := Backend(config) - - // insert all our creds - for i, cred := range c.creds { - - // all the creds will be the same for every user, but that's okay - // since what we care about is whether they changed on a single-user basis. 
- miam, err := awsutil.NewMockIAM( - // blank list for existing user - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{ - {}, - }, - }), - // initial key to store - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String(ak), - SecretAccessKey: aws.String(oldSecret), - }, - }), - awsutil.WithGetUserOutput(&iam.GetUserOutput{ - User: &iam.User{ - UserId: aws.String(cred.config.ID), - UserName: aws.String(cred.config.Username), - }, - }), - )(nil) - if err != nil { - t.Fatalf("couldn't initialze mock IAM handler: %s", err) - } - b.iamClient = miam - - err = b.createCredential(bgCTX, config.StorageView, cred.config, true) - if err != nil { - t.Fatalf("couldn't insert credential %d: %s", i, err) - } - - item := &queue.Item{ - Key: cred.config.Name, - Value: cred.config, - Priority: time.Now().Add(-1 * cred.age).Add(cred.config.RotationPeriod).Unix(), - } - err = b.credRotationQueue.Push(item) - if err != nil { - t.Fatalf("couldn't push item onto queue: %s", err) - } - } - - // update aws responses, same argument for why it's okay every cred will be the same - miam, err := awsutil.NewMockIAM( - // old key - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{ - { - AccessKeyId: aws.String(ak), - }, - }, - }), - // new key - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String(ak), - SecretAccessKey: aws.String(newSecret), - }, - }), - awsutil.WithGetUserOutput(&iam.GetUserOutput{ - User: &iam.User{ - UserId: aws.String("unique-id"), - UserName: aws.String("jane-doe"), - }, - }), - )(nil) - if err != nil { - t.Fatalf("couldn't initialze mock IAM handler: %s", err) - } - b.iamClient = miam - - req := &logical.Request{ - Storage: config.StorageView, - } - err = b.rotateExpiredStaticCreds(bgCTX, req) - if err != nil { - t.Fatalf("got an error rotating credentials: %s", err) - } - - // check our credentials - for i, cred := range c.creds { - entry, err := config.StorageView.Get(bgCTX, formatCredsStoragePath(cred.config.Name)) - if err != nil { - t.Fatalf("got an error retrieving credentials %d", i) - } - var out awsCredentials - err = entry.DecodeJSON(&out) - if err != nil { - t.Fatalf("could not unmarshal storage view entry for cred %d to an aws credential: %s", i, err) - } - - if cred.changed && out.SecretAccessKey != newSecret { - t.Fatalf("expected the key for cred %d to have changed, but it hasn't", i) - } else if !cred.changed && out.SecretAccessKey != oldSecret { - t.Fatalf("expected the key for cred %d to have stayed the same, but it changed", i) - } - } - }) - } -} - -type fakeIAM struct { - iamiface.IAMAPI - delReqs []*iam.DeleteAccessKeyInput -} - -func (f *fakeIAM) DeleteAccessKey(r *iam.DeleteAccessKeyInput) (*iam.DeleteAccessKeyOutput, error) { - f.delReqs = append(f.delReqs, r) - return f.IAMAPI.DeleteAccessKey(r) -} - -// TestCreateCredential verifies that credential creation firstly only deletes credentials if it needs to (i.e., two -// or more credentials on IAM), and secondly correctly deletes the oldest one. 
-func TestCreateCredential(t *testing.T) { - cases := []struct { - name string - username string - id string - deletedKey string - opts []awsutil.MockIAMOption - }{ - { - name: "zero keys", - username: "jane-doe", - id: "unique-id", - opts: []awsutil.MockIAMOption{ - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{}, - }), - // delete should _not_ be called - awsutil.WithDeleteAccessKeyError(errors.New("should not have been called")), - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String("key"), - SecretAccessKey: aws.String("itsasecret"), - }, - }), - awsutil.WithGetUserOutput(&iam.GetUserOutput{ - User: &iam.User{ - UserId: aws.String("unique-id"), - UserName: aws.String("jane-doe"), - }, - }), - }, - }, - { - name: "one key", - username: "jane-doe", - id: "unique-id", - opts: []awsutil.MockIAMOption{ - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{ - {AccessKeyId: aws.String("foo"), CreateDate: aws.Time(time.Now())}, - }, - }), - // delete should _not_ be called - awsutil.WithDeleteAccessKeyError(errors.New("should not have been called")), - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String("key"), - SecretAccessKey: aws.String("itsasecret"), - }, - }), - awsutil.WithGetUserOutput(&iam.GetUserOutput{ - User: &iam.User{ - UserId: aws.String("unique-id"), - UserName: aws.String("jane-doe"), - }, - }), - }, - }, - { - name: "two keys", - username: "jane-doe", - id: "unique-id", - deletedKey: "foo", - opts: []awsutil.MockIAMOption{ - awsutil.WithListAccessKeysOutput(&iam.ListAccessKeysOutput{ - AccessKeyMetadata: []*iam.AccessKeyMetadata{ - {AccessKeyId: aws.String("foo"), CreateDate: aws.Time(time.Time{})}, - {AccessKeyId: aws.String("bar"), CreateDate: aws.Time(time.Now())}, - }, - }), - awsutil.WithCreateAccessKeyOutput(&iam.CreateAccessKeyOutput{ - AccessKey: &iam.AccessKey{ - AccessKeyId: aws.String("key"), - SecretAccessKey: aws.String("itsasecret"), - }, - }), - awsutil.WithGetUserOutput(&iam.GetUserOutput{ - User: &iam.User{ - UserId: aws.String("unique-id"), - UserName: aws.String("jane-doe"), - }, - }), - }, - }, - } - - config := logical.TestBackendConfig() - config.StorageView = &logical.InmemStorage{} - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - miam, err := awsutil.NewMockIAM( - c.opts..., - )(nil) - if err != nil { - t.Fatal(err) - } - fiam := &fakeIAM{ - IAMAPI: miam, - } - - b := Backend(config) - b.iamClient = fiam - - err = b.createCredential(context.Background(), config.StorageView, staticRoleEntry{Username: c.username, ID: c.id}, true) - if err != nil { - t.Fatalf("got an error we didn't expect: %q", err) - } - - if c.deletedKey != "" { - if len(fiam.delReqs) != 1 { - t.Fatalf("called the wrong number of deletes (called %d deletes)", len(fiam.delReqs)) - } - actualKey := *fiam.delReqs[0].AccessKeyId - if c.deletedKey != actualKey { - t.Fatalf("we deleted the wrong key: %q instead of %q", actualKey, c.deletedKey) - } - } - }) - } -} diff --git a/builtin/logical/aws/secret_access_keys.go b/builtin/logical/aws/secret_access_keys.go index 2f1ac442bcbf50..c3818d5d9727ee 100644 --- a/builtin/logical/aws/secret_access_keys.go +++ b/builtin/logical/aws/secret_access_keys.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -153,7 +150,7 @@ func (b *backend) getFederationToken(ctx context.Context, s logical.Storage, return logical.ErrorResponse("must specify at least one of policy_arns or policy_document with %s credential_type", federationTokenCred), nil } - tokenResp, err := stsClient.GetFederationTokenWithContext(ctx, getTokenInput) + tokenResp, err := stsClient.GetFederationToken(getTokenInput) if err != nil { return logical.ErrorResponse("Error generating STS keys: %s", err), awsutil.CheckAWSError(err) } @@ -241,7 +238,7 @@ func (b *backend) assumeRole(ctx context.Context, s logical.Storage, if len(policyARNs) > 0 { assumeRoleInput.SetPolicyArns(convertPolicyARNs(policyARNs)) } - tokenResp, err := stsClient.AssumeRoleWithContext(ctx, assumeRoleInput) + tokenResp, err := stsClient.AssumeRole(assumeRoleInput) if err != nil { return logical.ErrorResponse("Error assuming role: %s", err), awsutil.CheckAWSError(err) } @@ -340,7 +337,7 @@ func (b *backend) secretAccessKeysCreate( } // Create the user - _, err = iamClient.CreateUserWithContext(ctx, createUserRequest) + _, err = iamClient.CreateUser(createUserRequest) if err != nil { if walErr := framework.DeleteWAL(ctx, s, walID); walErr != nil { iamErr := fmt.Errorf("error creating IAM user: %w", err) @@ -351,7 +348,7 @@ func (b *backend) secretAccessKeysCreate( for _, arn := range role.PolicyArns { // Attach existing policy against user - _, err = iamClient.AttachUserPolicyWithContext(ctx, &iam.AttachUserPolicyInput{ + _, err = iamClient.AttachUserPolicy(&iam.AttachUserPolicyInput{ UserName: aws.String(username), PolicyArn: aws.String(arn), }) @@ -362,7 +359,7 @@ func (b *backend) secretAccessKeysCreate( } if role.PolicyDocument != "" { // Add new inline user policy against user - _, err = iamClient.PutUserPolicyWithContext(ctx, &iam.PutUserPolicyInput{ + _, err = iamClient.PutUserPolicy(&iam.PutUserPolicyInput{ UserName: aws.String(username), PolicyName: aws.String(policyName), PolicyDocument: aws.String(role.PolicyDocument), @@ -374,7 +371,7 @@ func (b *backend) secretAccessKeysCreate( for _, group := range role.IAMGroups { // Add user to IAM groups - _, err = iamClient.AddUserToGroupWithContext(ctx, &iam.AddUserToGroupInput{ + _, err = iamClient.AddUserToGroup(&iam.AddUserToGroupInput{ UserName: aws.String(username), GroupName: aws.String(group), }) @@ -393,7 +390,7 @@ func (b *backend) secretAccessKeysCreate( } if len(tags) > 0 { - _, err = iamClient.TagUserWithContext(ctx, &iam.TagUserInput{ + _, err = iamClient.TagUser(&iam.TagUserInput{ Tags: tags, UserName: &username, }) @@ -404,7 +401,7 @@ func (b *backend) secretAccessKeysCreate( } // Create the keys - keyResp, err := iamClient.CreateAccessKeyWithContext(ctx, &iam.CreateAccessKeyInput{ + keyResp, err := iamClient.CreateAccessKey(&iam.CreateAccessKeyInput{ UserName: aws.String(username), }) if err != nil { diff --git a/builtin/logical/aws/secret_access_keys_test.go b/builtin/logical/aws/secret_access_keys_test.go index 890bb57b091228..7ee9d33b802757 100644 --- a/builtin/logical/aws/secret_access_keys_test.go +++ b/builtin/logical/aws/secret_access_keys_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -120,7 +117,7 @@ func TestGenUsername(t *testing.T) { func TestReadConfig_DefaultTemplate(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } @@ -164,7 +161,7 @@ func TestReadConfig_DefaultTemplate(t *testing.T) { func TestReadConfig_CustomTemplate(t *testing.T) { config := logical.TestBackendConfig() config.StorageView = &logical.InmemStorage{} - b := Backend(config) + b := Backend() if err := b.Setup(context.Background(), config); err != nil { t.Fatal(err) } diff --git a/builtin/logical/aws/stepwise_test.go b/builtin/logical/aws/stepwise_test.go index b6f1ffea81d51c..5eb9c1a21c731d 100644 --- a/builtin/logical/aws/stepwise_test.go +++ b/builtin/logical/aws/stepwise_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aws import ( diff --git a/builtin/logical/consul/backend.go b/builtin/logical/consul/backend.go index 5e42a5197ed09a..7fce10e2629443 100644 --- a/builtin/logical/consul/backend.go +++ b/builtin/logical/consul/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( @@ -10,8 +7,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const operationPrefixConsul = "consul" - // ReportedVersion is used to report a specific version to Vault. var ReportedVersion = "" diff --git a/builtin/logical/consul/backend_test.go b/builtin/logical/consul/backend_test.go index 94ce864d965f72..fa7cf647135a2f 100644 --- a/builtin/logical/consul/backend_test.go +++ b/builtin/logical/consul/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/builtin/logical/consul/client.go b/builtin/logical/consul/client.go index 1e30c660271c45..fd54830a4b7778 100644 --- a/builtin/logical/consul/client.go +++ b/builtin/logical/consul/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/builtin/logical/consul/cmd/consul/main.go b/builtin/logical/consul/cmd/consul/main.go index f42a535b958764..3b884ddf85ef4e 100644 --- a/builtin/logical/consul/cmd/consul/main.go +++ b/builtin/logical/consul/cmd/consul/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: consul.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/consul/path_config.go b/builtin/logical/consul/path_config.go index 2f925d5b24e91b..1fd60e30ec5782 100644 --- a/builtin/logical/consul/path_config.go +++ b/builtin/logical/consul/path_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package consul import ( @@ -15,11 +12,6 @@ import ( func pathConfigAccess(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/access", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixConsul, - }, - Fields: map[string]*framework.FieldSchema{ "address": { Type: framework.TypeString, @@ -60,20 +52,9 @@ must be x509 PEM encoded and if this is set you need to also set client_cert.`, }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigAccessRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "access-configuration", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigAccessWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "access", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathConfigAccessRead, + logical.UpdateOperation: b.pathConfigAccessWrite, }, } } @@ -135,7 +116,7 @@ func (b *backend) pathConfigAccessWrite(ctx context.Context, req *logical.Reques } token, _, err := client.ACL().Bootstrap() if err != nil { - return logical.ErrorResponse("Token not provided and failed to bootstrap ACLs: %s", err), nil + return logical.ErrorResponse("Token not provided and failed to bootstrap ACLs"), err } config.Token = token.SecretID } diff --git a/builtin/logical/consul/path_roles.go b/builtin/logical/consul/path_roles.go index 3e8c059f53d5ac..fa513b5017d28f 100644 --- a/builtin/logical/consul/path_roles.go +++ b/builtin/logical/consul/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( @@ -17,11 +14,6 @@ func pathListRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixConsul, - OperationSuffix: "roles", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -31,12 +23,6 @@ func pathListRoles(b *backend) *framework.Path { func pathRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixConsul, - OperationSuffix: "role", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/consul/path_token.go b/builtin/logical/consul/path_token.go index 9c8dbd1d4457ea..7568774f39c6bd 100644 --- a/builtin/logical/consul/path_token.go +++ b/builtin/logical/consul/path_token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package consul import ( @@ -21,13 +18,6 @@ const ( func pathToken(b *backend) *framework.Path { return &framework.Path{ Pattern: "creds/" + framework.GenericNameRegex("role"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixConsul, - OperationVerb: "generate", - OperationSuffix: "credentials", - }, - Fields: map[string]*framework.FieldSchema{ "role": { Type: framework.TypeString, diff --git a/builtin/logical/consul/path_token_test.go b/builtin/logical/consul/path_token_test.go index 77e7f29ab128af..98e2b826fbcaef 100644 --- a/builtin/logical/consul/path_token_test.go +++ b/builtin/logical/consul/path_token_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/builtin/logical/consul/secret_token.go b/builtin/logical/consul/secret_token.go index f2f206b7026a7d..6dbccca014cef1 100644 --- a/builtin/logical/consul/secret_token.go +++ b/builtin/logical/consul/secret_token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/builtin/logical/database/backend.go b/builtin/logical/database/backend.go index 94091e201947e2..e2e362fd5fa319 100644 --- a/builtin/logical/database/backend.go +++ b/builtin/logical/database/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -27,11 +24,10 @@ import ( ) const ( - operationPrefixDatabase = "database" - databaseConfigPath = "config/" - databaseRolePath = "role/" - databaseStaticRolePath = "static-role/" - minRootCredRollbackAge = 1 * time.Minute + databaseConfigPath = "config/" + databaseRolePath = "role/" + databaseStaticRolePath = "static-role/" + minRootCredRollbackAge = 1 * time.Minute ) type dbPluginInstance struct { diff --git a/builtin/logical/database/backend_test.go b/builtin/logical/database/backend_test.go index 574bcd01af6293..27ce027c959e25 100644 --- a/builtin/logical/database/backend_test.go +++ b/builtin/logical/database/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/credentials.go b/builtin/logical/database/credentials.go index c43c2648f03de6..a6e54678d0589a 100644 --- a/builtin/logical/database/credentials.go +++ b/builtin/logical/database/credentials.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -8,17 +5,12 @@ import ( "crypto/rand" "crypto/rsa" "crypto/x509" - "crypto/x509/pkix" "encoding/pem" "fmt" "io" "strings" - "time" "github.com/hashicorp/vault/helper/random" - "github.com/hashicorp/vault/sdk/database/dbplugin/v5" - "github.com/hashicorp/vault/sdk/helper/certutil" - "github.com/hashicorp/vault/sdk/helper/template" "github.com/mitchellh/mapstructure" ) @@ -175,217 +167,3 @@ func (kg rsaKeyGenerator) configMap() (map[string]interface{}, error) { } return config, nil } - -type ClientCertificateGenerator struct { - // CommonNameTemplate is username template to be used for the client certificate common name. - CommonNameTemplate string `mapstructure:"common_name_template,omitempty"` - - // CAPrivateKey is the PEM-encoded private key for the given ca_cert. - CAPrivateKey string `mapstructure:"ca_private_key,omitempty"` - - // CACert is the PEM-encoded CA certificate. 
- CACert string `mapstructure:"ca_cert,omitempty"` - - // KeyType specifies the desired key type. - // Options include: 'rsa', 'ed25519', 'ec'. - KeyType string `mapstructure:"key_type,omitempty"` - - // KeyBits is the number of bits to use for the generated keys. - // Options include: with key_type=rsa, 2048 (default), 3072, 4096; - // With key_type=ec, allowed values are: 224, 256 (default), 384, 521; - // Ignored with key_type=ed25519. - KeyBits int `mapstructure:"key_bits,omitempty"` - - // SignatureBits is the number of bits to use in the signature algorithm. - // Options include: 256 (default), 384, 512. - SignatureBits int `mapstructure:"signature_bits,omitempty"` - - parsedCABundle *certutil.ParsedCertBundle - cnProducer template.StringTemplate -} - -// newClientCertificateGenerator returns a new ClientCertificateGenerator -// using the given config. Default values will be set on the returned -// ClientCertificateGenerator if not provided in the config. -func newClientCertificateGenerator(config map[string]interface{}) (ClientCertificateGenerator, error) { - var cg ClientCertificateGenerator - if err := mapstructure.WeakDecode(config, &cg); err != nil { - return cg, err - } - - switch cg.KeyType { - case "rsa": - switch cg.KeyBits { - case 0: - cg.KeyBits = 2048 - case 2048, 3072, 4096: - default: - return cg, fmt.Errorf("invalid key_bits") - } - case "ec": - switch cg.KeyBits { - case 0: - cg.KeyBits = 256 - case 224, 256, 384, 521: - default: - return cg, fmt.Errorf("invalid key_bits") - } - case "ed25519": - // key_bits ignored - default: - return cg, fmt.Errorf("invalid key_type") - } - - switch cg.SignatureBits { - case 0: - cg.SignatureBits = 256 - case 256, 384, 512: - default: - return cg, fmt.Errorf("invalid signature_bits") - } - - if cg.CommonNameTemplate == "" { - return cg, fmt.Errorf("missing required common_name_template") - } - - // Validate the common name template - t, err := template.NewTemplate(template.Template(cg.CommonNameTemplate)) - if err != nil { - return cg, fmt.Errorf("failed to create template: %w", err) - } - - _, err = t.Generate(dbplugin.UsernameMetadata{}) - if err != nil { - return cg, fmt.Errorf("invalid common_name_template: %w", err) - } - cg.cnProducer = t - - if cg.CACert == "" { - return cg, fmt.Errorf("missing required ca_cert") - } - if cg.CAPrivateKey == "" { - return cg, fmt.Errorf("missing required ca_private_key") - } - parsedBundle, err := certutil.ParsePEMBundle(strings.Join([]string{cg.CACert, cg.CAPrivateKey}, "\n")) - if err != nil { - return cg, err - } - if parsedBundle.PrivateKey == nil { - return cg, fmt.Errorf("private key not found in the PEM bundle") - } - if parsedBundle.PrivateKeyType == certutil.UnknownPrivateKey { - return cg, fmt.Errorf("unknown private key found in the PEM bundle") - } - if parsedBundle.Certificate == nil { - return cg, fmt.Errorf("certificate not found in the PEM bundle") - } - if !parsedBundle.Certificate.IsCA { - return cg, fmt.Errorf("the given certificate is not marked for CA use") - } - if !parsedBundle.Certificate.BasicConstraintsValid { - return cg, fmt.Errorf("the given certificate does not meet basic constraints for CA use") - } - - certBundle, err := parsedBundle.ToCertBundle() - if err != nil { - return cg, fmt.Errorf("error converting raw values into cert bundle: %w", err) - } - - parsedCABundle, err := certBundle.ToParsedCertBundle() - if err != nil { - return cg, fmt.Errorf("failed to parse cert bundle: %w", err) - } - cg.parsedCABundle = parsedCABundle - - return cg, nil -} - -func (cg 
*ClientCertificateGenerator) generate(r io.Reader, expiration time.Time, userMeta dbplugin.UsernameMetadata) (*certutil.CertBundle, string, error) { - commonName, err := cg.cnProducer.Generate(userMeta) - if err != nil { - return nil, "", err - } - - // Set defaults - keyBits := cg.KeyBits - signatureBits := cg.SignatureBits - switch cg.KeyType { - case "rsa": - if keyBits == 0 { - keyBits = 2048 - } - if signatureBits == 0 { - signatureBits = 256 - } - case "ec": - if keyBits == 0 { - keyBits = 256 - } - if signatureBits == 0 { - if keyBits == 224 { - signatureBits = 256 - } else { - signatureBits = keyBits - } - } - case "ed25519": - // key_bits ignored - if signatureBits == 0 { - signatureBits = 256 - } - } - - subject := pkix.Name{ - CommonName: commonName, - // Additional subject DN options intentionally omitted for now - } - - creation := &certutil.CreationBundle{ - Params: &certutil.CreationParameters{ - Subject: subject, - KeyType: cg.KeyType, - KeyBits: cg.KeyBits, - SignatureBits: cg.SignatureBits, - NotAfter: expiration, - KeyUsage: x509.KeyUsageDigitalSignature, - ExtKeyUsage: certutil.ClientAuthExtKeyUsage, - BasicConstraintsValidForNonCA: false, - NotBeforeDuration: 30 * time.Second, - URLs: &certutil.URLEntries{ - IssuingCertificates: []string{}, - CRLDistributionPoints: []string{}, - OCSPServers: []string{}, - }, - }, - SigningBundle: &certutil.CAInfoBundle{ - ParsedCertBundle: *cg.parsedCABundle, - URLs: &certutil.URLEntries{ - IssuingCertificates: []string{}, - CRLDistributionPoints: []string{}, - OCSPServers: []string{}, - }, - }, - } - - parsedClientBundle, err := certutil.CreateCertificateWithRandomSource(creation, r) - if err != nil { - return nil, "", fmt.Errorf("unable to generate client certificate: %w", err) - } - - cb, err := parsedClientBundle.ToCertBundle() - if err != nil { - return nil, "", fmt.Errorf("error converting raw cert bundle to cert bundle: %w", err) - } - - return cb, subject.String(), nil -} - -// configMap returns the configuration of the ClientCertificateGenerator -// as a map from string to string. -func (cg ClientCertificateGenerator) configMap() (map[string]interface{}, error) { - config := make(map[string]interface{}) - if err := mapstructure.WeakDecode(cg, &config); err != nil { - return nil, err - } - return config, nil -} diff --git a/builtin/logical/database/credentials_test.go b/builtin/logical/database/credentials_test.go index 5e113e3b2310f6..32ddc26856fef5 100644 --- a/builtin/logical/database/credentials_test.go +++ b/builtin/logical/database/credentials_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -17,252 +14,6 @@ import ( "github.com/stretchr/testify/mock" ) -// Test_newClientCertificateGenerator tests the ClientCertificateGenerator struct based on the config -func Test_newClientCertificateGenerator(t *testing.T) { - type args struct { - config map[string]interface{} - } - tests := []struct { - name string - args args - want ClientCertificateGenerator - wantErr bool - }{ - { - name: "newClientCertificateGenerator with nil config", - args: args{ - config: nil, - }, - want: ClientCertificateGenerator{ - CommonNameTemplate: "", - CAPrivateKey: "", - CACert: "", - KeyType: "", - KeyBits: 0, - SignatureBits: 0, - }, - }, - { - name: "newClientCertificateGenerator with zero value key_type", - args: args{ - config: map[string]interface{}{ - "key_type": "", - }, - }, - want: ClientCertificateGenerator{ - KeyType: "", - }, - }, - { - name: "newClientCertificateGenerator with rsa value key_type", - args: args{ - config: map[string]interface{}{ - "key_type": "rsa", - }, - }, - want: ClientCertificateGenerator{ - KeyType: "rsa", - KeyBits: 2048, - SignatureBits: 256, - }, - }, - { - name: "newClientCertificateGenerator with ec value key_type", - args: args{ - config: map[string]interface{}{ - "key_type": "ec", - }, - }, - want: ClientCertificateGenerator{ - KeyType: "ec", - KeyBits: 256, - SignatureBits: 256, - }, - }, - { - name: "newClientCertificateGenerator with ed25519 value key_type", - args: args{ - config: map[string]interface{}{ - "key_type": "ed25519", - }, - }, - want: ClientCertificateGenerator{ - KeyType: "ed25519", - SignatureBits: 256, - }, - }, - { - name: "newClientCertificateGenerator with invalid key_type", - args: args{ - config: map[string]interface{}{ - "key_type": "ece", - }, - }, - wantErr: true, - }, - { - name: "newClientCertificateGenerator with zero value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "0", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 0, - }, - }, - { - name: "newClientCertificateGenerator with 2048 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "2048", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 2048, - }, - }, - { - name: "newClientCertificateGenerator with 3072 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "3072", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 3072, - }, - }, - { - name: "newClientCertificateGenerator with 4096 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "4096", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 4096, - }, - }, - { - name: "newClientCertificateGenerator with 224 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "224", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 224, - }, - }, - { - name: "newClientCertificateGenerator with 256 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "256", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 256, - }, - }, - { - name: "newClientCertificateGenerator with 384 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "384", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 384, - }, - }, - { - name: "newClientCertificateGenerator with 521 value key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "521", - }, - }, - want: ClientCertificateGenerator{ - KeyBits: 521, - }, - }, - { - name: "newClientCertificateGenerator with 
invalid key_bits", - args: args{ - config: map[string]interface{}{ - "key_bits": "4097", - }, - }, - wantErr: true, - }, - { - name: "newClientCertificateGenerator with zero value signature_bits", - args: args{ - config: map[string]interface{}{ - "signature_bits": "0", - }, - }, - want: ClientCertificateGenerator{ - SignatureBits: 0, - }, - }, - { - name: "newClientCertificateGenerator with 256 value signature_bits", - args: args{ - config: map[string]interface{}{ - "signature_bits": "256", - }, - }, - want: ClientCertificateGenerator{ - SignatureBits: 256, - }, - }, - { - name: "newClientCertificateGenerator with 384 value signature_bits", - args: args{ - config: map[string]interface{}{ - "signature_bits": "384", - }, - }, - want: ClientCertificateGenerator{ - SignatureBits: 384, - }, - }, - { - name: "newClientCertificateGenerator with 512 value signature_bits", - args: args{ - config: map[string]interface{}{ - "signature_bits": "512", - }, - }, - want: ClientCertificateGenerator{ - SignatureBits: 512, - }, - }, - { - name: "newClientCertificateGenerator with invalid signature_bits", - args: args{ - config: map[string]interface{}{ - "signature_bits": "612", - }, - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := newClientCertificateGenerator(tt.args.config) - if tt.wantErr { - assert.Error(t, err) - return - } - assert.Equal(t, tt.want, got) - }) - } -} - func Test_newPasswordGenerator(t *testing.T) { type args struct { config map[string]interface{} diff --git a/builtin/logical/database/dbplugin/plugin_test.go b/builtin/logical/database/dbplugin/plugin_test.go index 2b5f7a981ec369..bea9e30ec7a58b 100644 --- a/builtin/logical/database/dbplugin/plugin_test.go +++ b/builtin/logical/database/dbplugin/plugin_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin_test import ( diff --git a/builtin/logical/database/mocks_test.go b/builtin/logical/database/mocks_test.go index afb1bbc79f68a3..13eb5300614266 100644 --- a/builtin/logical/database/mocks_test.go +++ b/builtin/logical/database/mocks_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/mockv4.go b/builtin/logical/database/mockv4.go index a85f307ec49d60..4f0b181683a85a 100644 --- a/builtin/logical/database/mockv4.go +++ b/builtin/logical/database/mockv4.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/mockv5.go b/builtin/logical/database/mockv5.go index fecccfed209f54..632cfb38e037f9 100644 --- a/builtin/logical/database/mockv5.go +++ b/builtin/logical/database/mockv5.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/path_config_connection.go b/builtin/logical/database/path_config_connection.go index b869facef0decc..9f1ad4cf5744d2 100644 --- a/builtin/logical/database/path_config_connection.go +++ b/builtin/logical/database/path_config_connection.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -62,13 +59,6 @@ func (c *DatabaseConfig) SupportsCredentialType(credentialType v5.CredentialType func pathResetConnection(b *databaseBackend) *framework.Path { return &framework.Path{ Pattern: fmt.Sprintf("reset/%s", framework.GenericNameRegex("name")), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "reset", - OperationSuffix: "connection", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -113,11 +103,6 @@ func (b *databaseBackend) pathConnectionReset() framework.OperationFunc { func pathConfigurePluginConnection(b *databaseBackend) *framework.Path { return &framework.Path{ Pattern: fmt.Sprintf("config/%s", framework.GenericNameRegex("name")), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -164,36 +149,11 @@ func pathConfigurePluginConnection(b *databaseBackend) *framework.Path { }, ExistenceCheck: b.connectionExistenceCheck(), - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.CreateOperation: &framework.PathOperation{ - Callback: b.connectionWriteHandler(), - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "connection", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.connectionWriteHandler(), - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "connection", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.connectionReadHandler(), - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "read", - OperationSuffix: "connection-configuration", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.connectionDeleteHandler(), - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "delete", - OperationSuffix: "connection-configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.CreateOperation: b.connectionWriteHandler(), + logical.UpdateOperation: b.connectionWriteHandler(), + logical.ReadOperation: b.connectionReadHandler(), + logical.DeleteOperation: b.connectionDeleteHandler(), }, HelpSynopsis: pathConfigConnectionHelpSyn, @@ -221,11 +181,6 @@ func pathListPluginConnection(b *databaseBackend) *framework.Path { return &framework.Path{ Pattern: fmt.Sprintf("config/?$"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationSuffix: "connections", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.connectionListHandler(), }, diff --git a/builtin/logical/database/path_config_connection_test.go b/builtin/logical/database/path_config_connection_test.go index 8cf06062890f5c..18f850dbce9258 100644 --- a/builtin/logical/database/path_config_connection_test.go +++ b/builtin/logical/database/path_config_connection_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/path_creds_create.go b/builtin/logical/database/path_creds_create.go index a0fe6a38456c92..e57516259fefbd 100644 --- a/builtin/logical/database/path_creds_create.go +++ b/builtin/logical/database/path_creds_create.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -9,7 +6,6 @@ import ( "time" "github.com/hashicorp/go-secure-stdlib/strutil" - "github.com/hashicorp/vault/sdk/database/dbplugin/v5" v5 "github.com/hashicorp/vault/sdk/database/dbplugin/v5" "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/logical" @@ -19,13 +15,6 @@ func pathCredsCreate(b *databaseBackend) []*framework.Path { return []*framework.Path{ { Pattern: "creds/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "generate", - OperationSuffix: "credentials", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -42,13 +31,6 @@ func pathCredsCreate(b *databaseBackend) []*framework.Path { }, { Pattern: "static-creds/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "read", - OperationSuffix: "static-role-credentials", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -172,27 +154,6 @@ func (b *databaseBackend) pathCredsCreateRead() framework.OperationFunc { // Set output credential respData["rsa_private_key"] = string(private) - case v5.CredentialTypeClientCertificate: - generator, err := newClientCertificateGenerator(role.CredentialConfig) - if err != nil { - return nil, fmt.Errorf("failed to construct credential generator: %s", err) - } - - // Generate the client certificate - cb, subject, err := generator.generate(b.GetRandomReader(), expiration, - newUserReq.UsernameConfig) - if err != nil { - return nil, fmt.Errorf("failed to generate client certificate: %w", err) - } - - // Set input credential - newUserReq.CredentialType = dbplugin.CredentialTypeClientCertificate - newUserReq.Subject = subject - - // Set output credential - respData["client_certificate"] = cb.Certificate - respData["private_key"] = cb.PrivateKey - respData["private_key_type"] = cb.PrivateKeyType } // Overwriting the password in the event this is a legacy database diff --git a/builtin/logical/database/path_roles.go b/builtin/logical/database/path_roles.go index ba4aa7f20e4b36..02a199c5326d8c 100644 --- a/builtin/logical/database/path_roles.go +++ b/builtin/logical/database/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -24,12 +21,6 @@ func pathListRoles(b *databaseBackend) []*framework.Path { { Pattern: "roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "list", - OperationSuffix: "roles", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -40,12 +31,6 @@ func pathListRoles(b *databaseBackend) []*framework.Path { { Pattern: "static-roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "list", - OperationSuffix: "static-roles", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -59,11 +44,7 @@ func pathListRoles(b *databaseBackend) []*framework.Path { func pathRoles(b *databaseBackend) []*framework.Path { return []*framework.Path{ { - Pattern: "roles/" + framework.GenericNameRegex("name"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationSuffix: "role", - }, + Pattern: "roles/" + framework.GenericNameRegex("name"), Fields: fieldsForType(databaseRolePath), ExistenceCheck: b.pathRoleExistenceCheck, Callbacks: map[logical.Operation]framework.OperationFunc{ @@ -78,11 +59,7 @@ func pathRoles(b *databaseBackend) []*framework.Path { }, { - Pattern: "static-roles/" + framework.GenericNameRegex("name"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationSuffix: "static-role", - }, + Pattern: "static-roles/" + framework.GenericNameRegex("name"), Fields: fieldsForType(databaseStaticRolePath), ExistenceCheck: b.pathStaticRoleExistenceCheck, Callbacks: map[logical.Operation]framework.OperationFunc{ @@ -651,8 +628,6 @@ func (r *roleEntry) setCredentialType(credentialType string) error { r.CredentialType = v5.CredentialTypePassword case v5.CredentialTypeRSAPrivateKey.String(): r.CredentialType = v5.CredentialTypeRSAPrivateKey - case v5.CredentialTypeClientCertificate.String(): - r.CredentialType = v5.CredentialTypeClientCertificate default: return fmt.Errorf("invalid credential_type %q", credentialType) } @@ -694,18 +669,6 @@ func (r *roleEntry) setCredentialConfig(config map[string]string) error { if len(cm) > 0 { r.CredentialConfig = cm } - case v5.CredentialTypeClientCertificate: - generator, err := newClientCertificateGenerator(c) - if err != nil { - return err - } - cm, err := generator.configMap() - if err != nil { - return err - } - if len(cm) > 0 { - r.CredentialConfig = cm - } } return nil diff --git a/builtin/logical/database/path_roles_test.go b/builtin/logical/database/path_roles_test.go index dc2eddeb1a04f7..bfb20633896ef5 100644 --- a/builtin/logical/database/path_roles_test.go +++ b/builtin/logical/database/path_roles_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/path_rotate_credentials.go b/builtin/logical/database/path_rotate_credentials.go index 49081a3c3f404d..03a6845e1c5713 100644 --- a/builtin/logical/database/path_rotate_credentials.go +++ b/builtin/logical/database/path_rotate_credentials.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package database import ( @@ -19,13 +16,6 @@ func pathRotateRootCredentials(b *databaseBackend) []*framework.Path { return []*framework.Path{ { Pattern: "rotate-root/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "rotate", - OperationSuffix: "root-credentials", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -46,13 +36,6 @@ func pathRotateRootCredentials(b *databaseBackend) []*framework.Path { }, { Pattern: "rotate-role/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixDatabase, - OperationVerb: "rotate", - OperationSuffix: "static-role-credentials", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -91,11 +74,6 @@ func (b *databaseBackend) pathRotateRootCredentialsUpdate() framework.OperationF return nil, fmt.Errorf("unable to rotate root credentials: no username in configuration") } - rootPassword, ok := config.ConnectionDetails["password"].(string) - if !ok || rootPassword == "" { - return nil, fmt.Errorf("unable to rotate root credentials: no password in configuration") - } - dbi, err := b.GetConnection(ctx, req.Storage, name) if err != nil { return nil, err diff --git a/builtin/logical/database/rollback.go b/builtin/logical/database/rollback.go index 22ce6168663e08..a9810e816643fa 100644 --- a/builtin/logical/database/rollback.go +++ b/builtin/logical/database/rollback.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/rollback_test.go b/builtin/logical/database/rollback_test.go index 8f36fe26a79573..dc061ae99a3fb8 100644 --- a/builtin/logical/database/rollback_test.go +++ b/builtin/logical/database/rollback_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/rotation.go b/builtin/logical/database/rotation.go index 1ef54aecac3260..5ae2756f279cd1 100644 --- a/builtin/logical/database/rotation.go +++ b/builtin/logical/database/rotation.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/rotation_test.go b/builtin/logical/database/rotation_test.go index e0cb96dd67cfb4..ccbd64588592d7 100644 --- a/builtin/logical/database/rotation_test.go +++ b/builtin/logical/database/rotation_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/secret_creds.go b/builtin/logical/database/secret_creds.go index fefa452a5d3564..9c9b348e2437ac 100644 --- a/builtin/logical/database/secret_creds.go +++ b/builtin/logical/database/secret_creds.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/version_wrapper.go b/builtin/logical/database/version_wrapper.go index daab17964d3e27..8c4db1388861ca 100644 --- a/builtin/logical/database/version_wrapper.go +++ b/builtin/logical/database/version_wrapper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/version_wrapper_test.go b/builtin/logical/database/version_wrapper_test.go index 95a5f7b6fa9530..054241f978a32c 100644 --- a/builtin/logical/database/version_wrapper_test.go +++ b/builtin/logical/database/version_wrapper_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database import ( diff --git a/builtin/logical/database/versioning_large_test.go b/builtin/logical/database/versioning_large_test.go index b39ddb7e1ce4a7..a9f7efde62a68f 100644 --- a/builtin/logical/database/versioning_large_test.go +++ b/builtin/logical/database/versioning_large_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package database // This file contains all "large"/expensive tests. These are running requests against a running backend diff --git a/builtin/logical/nomad/backend.go b/builtin/logical/nomad/backend.go index 0becdae776b12b..e1df32e87a8319 100644 --- a/builtin/logical/nomad/backend.go +++ b/builtin/logical/nomad/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package nomad import ( @@ -11,8 +8,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const operationPrefixNomad = "nomad" - // Factory returns a Nomad backend that satisfies the logical.Backend interface func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() diff --git a/builtin/logical/nomad/backend_test.go b/builtin/logical/nomad/backend_test.go index 1fe6adbb2d38e1..8452c2b019e41a 100644 --- a/builtin/logical/nomad/backend_test.go +++ b/builtin/logical/nomad/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package nomad import ( @@ -8,14 +5,13 @@ import ( "fmt" "os" "reflect" - "runtime" "strings" "testing" "time" nomadapi "github.com/hashicorp/nomad/api" "github.com/hashicorp/vault/helper/testhelpers" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/sdk/logical" "github.com/mitchellh/mapstructure" ) @@ -39,11 +35,6 @@ func (c *Config) Client() (*nomadapi.Client, error) { } func prepareTestContainer(t *testing.T, bootstrap bool) (func(), *Config) { - // Skipping on ARM, as this image can't run on ARM architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - if retAddress := os.Getenv("NOMAD_ADDR"); retAddress != "" { s, err := docker.NewServiceURLParse(retAddress) if err != nil { @@ -53,7 +44,7 @@ func prepareTestContainer(t *testing.T, bootstrap bool) (func(), *Config) { } runner, err := docker.NewServiceRunner(docker.RunOptions{ - ImageRepo: "docker.mirror.hashicorp.services/multani/nomad", + ImageRepo: "multani/nomad", ImageTag: "1.1.6", ContainerName: "nomad", Ports: []string{"4646/tcp"}, diff --git a/builtin/logical/nomad/cmd/nomad/main.go b/builtin/logical/nomad/cmd/nomad/main.go index 10f45aabb48371..31b1c93500e7db 100644 --- a/builtin/logical/nomad/cmd/nomad/main.go +++ b/builtin/logical/nomad/cmd/nomad/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: nomad.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/nomad/path_config_access.go b/builtin/logical/nomad/path_config_access.go index cbb214002438f2..b482a9c1aca862 100644 --- a/builtin/logical/nomad/path_config_access.go +++ b/builtin/logical/nomad/path_config_access.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package nomad import ( @@ -16,11 +13,6 @@ const configAccessKey = "config/access" func pathConfigAccess(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/access", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixNomad, - }, - Fields: map[string]*framework.FieldSchema{ "address": { Type: framework.TypeString, @@ -53,35 +45,11 @@ must be x509 PEM encoded and if this is set you need to also set client_cert.`, }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigAccessRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "read", - OperationSuffix: "access-configuration", - }, - }, - logical.CreateOperation: &framework.PathOperation{ - Callback: b.pathConfigAccessWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "access", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigAccessWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "access", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathConfigAccessDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "delete", - OperationSuffix: "access-configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathConfigAccessRead, + logical.CreateOperation: b.pathConfigAccessWrite, + logical.UpdateOperation: b.pathConfigAccessWrite, + logical.DeleteOperation: b.pathConfigAccessDelete, }, ExistenceCheck: b.configExistenceCheck, diff --git a/builtin/logical/nomad/path_config_lease.go b/builtin/logical/nomad/path_config_lease.go index 05c83ff938f8ac..676e515cb84b5f 100644 --- a/builtin/logical/nomad/path_config_lease.go +++ b/builtin/logical/nomad/path_config_lease.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
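[Editor's note, illustrative sketch only] The cmd/nomad/main.go hunk above swaps plugin.ServeMultiplex back to plugin.Serve with an explicit TLSProviderFunc. A minimal sketch of the non-multiplexed pattern the backport keeps, mirroring the surrounding code rather than introducing any new API; per the comment this hunk removes, the TLS provider is what lets the plugin talk to Vault versions without plugin AutoMTLS support.

package main

import (
	"os"

	hclog "github.com/hashicorp/go-hclog"
	"github.com/hashicorp/vault/api"
	"github.com/hashicorp/vault/builtin/logical/nomad"
	"github.com/hashicorp/vault/sdk/plugin"
)

func main() {
	apiClientMeta := &api.PluginAPIClientMeta{}
	flags := apiClientMeta.FlagSet()
	flags.Parse(os.Args[1:])

	// Without multiplexing, the plugin supplies its own TLS configuration.
	tlsConfig := apiClientMeta.GetTLSConfig()
	tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig)

	if err := plugin.Serve(&plugin.ServeOpts{
		BackendFactoryFunc: nomad.Factory,
		TLSProviderFunc:    tlsProviderFunc,
	}); err != nil {
		logger := hclog.New(&hclog.LoggerOptions{})
		logger.Error("plugin shutting down", "error", err)
		os.Exit(1)
	}
}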
-// SPDX-License-Identifier: MPL-2.0 - package nomad import ( @@ -16,11 +13,6 @@ const leaseConfigKey = "config/lease" func pathConfigLease(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/lease", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixNomad, - }, - Fields: map[string]*framework.FieldSchema{ "ttl": { Type: framework.TypeDurationSecond, @@ -32,28 +24,10 @@ func pathConfigLease(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathLeaseRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "read", - OperationSuffix: "lease-configuration", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathLeaseUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "lease", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathLeaseDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "delete", - OperationSuffix: "lease-configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathLeaseRead, + logical.UpdateOperation: b.pathLeaseUpdate, + logical.DeleteOperation: b.pathLeaseDelete, }, HelpSynopsis: pathConfigLeaseHelpSyn, diff --git a/builtin/logical/nomad/path_creds_create.go b/builtin/logical/nomad/path_creds_create.go index 29f84d136908d1..14df1ff939db54 100644 --- a/builtin/logical/nomad/path_creds_create.go +++ b/builtin/logical/nomad/path_creds_create.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package nomad import ( @@ -20,13 +17,6 @@ const maxTokenNameLength = 256 func pathCredsCreate(b *backend) *framework.Path { return &framework.Path{ Pattern: "creds/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixNomad, - OperationVerb: "generate", - OperationSuffix: "credentials", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/nomad/path_roles.go b/builtin/logical/nomad/path_roles.go index e3cebefb523299..92109ba741230c 100644 --- a/builtin/logical/nomad/path_roles.go +++ b/builtin/logical/nomad/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package nomad import ( @@ -16,11 +13,6 @@ func pathListRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "role/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixNomad, - OperationSuffix: "roles", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -30,12 +22,6 @@ func pathListRoles(b *backend) *framework.Path { func pathRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "role/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixNomad, - OperationSuffix: "role", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/nomad/secret_token.go b/builtin/logical/nomad/secret_token.go index 3c6b920681d370..fd446f7a6436f5 100644 --- a/builtin/logical/nomad/secret_token.go +++ b/builtin/logical/nomad/secret_token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package nomad import ( diff --git a/builtin/logical/pki/acme_authorizations.go b/builtin/logical/pki/acme_authorizations.go deleted file mode 100644 index 82d439d88a5408..00000000000000 --- a/builtin/logical/pki/acme_authorizations.go +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "fmt" - "time" -) - -type ACMEIdentifierType string - -const ( - ACMEDNSIdentifier ACMEIdentifierType = "dns" - ACMEIPIdentifier ACMEIdentifierType = "ip" -) - -type ACMEIdentifier struct { - Type ACMEIdentifierType `json:"type"` - Value string `json:"value"` - OriginalValue string `json:"original_value"` - IsWildcard bool `json:"is_wildcard"` -} - -func (ai *ACMEIdentifier) MaybeParseWildcard() (bool, string, error) { - if ai.Type != ACMEDNSIdentifier || !isWildcardDomain(ai.Value) { - return false, ai.Value, nil - } - - // Here on out, technically it is a wildcard. - ai.IsWildcard = true - - wildcardLabel, reducedName, err := validateWildcardDomain(ai.Value) - if err != nil { - return true, "", err - } - - if wildcardLabel != "*" { - // Per RFC 8555 Section. 7.1.3. Order Objects: - // - // > Any identifier of type "dns" in a newOrder request MAY have a - // > wildcard domain name as its value. A wildcard domain name consists - // > of a single asterisk character followed by a single full stop - // > character ("*.") followed by a domain name as defined for use in the - // > Subject Alternate Name Extension by [RFC5280]. - return true, "", fmt.Errorf("wildcard must be entire left-most label") - } - - if reducedName == "" { - return true, "", fmt.Errorf("wildcard must not be entire domain name; need at least two domain labels") - } - - // Parsing was indeed successful, so update our reduced name. 
- ai.Value = reducedName - - return true, reducedName, nil -} - -func (ai *ACMEIdentifier) NetworkMarshal(useOriginalValue bool) map[string]interface{} { - value := ai.OriginalValue - if !useOriginalValue { - value = ai.Value - } - return map[string]interface{}{ - "type": ai.Type, - "value": value, - } -} - -type ACMEAuthorizationStatusType string - -const ( - ACMEAuthorizationPending ACMEAuthorizationStatusType = "pending" - ACMEAuthorizationValid ACMEAuthorizationStatusType = "valid" - ACMEAuthorizationInvalid ACMEAuthorizationStatusType = "invalid" - ACMEAuthorizationDeactivated ACMEAuthorizationStatusType = "deactivated" - ACMEAuthorizationExpired ACMEAuthorizationStatusType = "expired" - ACMEAuthorizationRevoked ACMEAuthorizationStatusType = "revoked" -) - -type ACMEOrderStatusType string - -const ( - ACMEOrderPending ACMEOrderStatusType = "pending" - ACMEOrderProcessing ACMEOrderStatusType = "processing" - ACMEOrderValid ACMEOrderStatusType = "valid" - ACMEOrderInvalid ACMEOrderStatusType = "invalid" - ACMEOrderReady ACMEOrderStatusType = "ready" -) - -type ACMEChallengeType string - -const ( - ACMEHTTPChallenge ACMEChallengeType = "http-01" - ACMEDNSChallenge ACMEChallengeType = "dns-01" - ACMEALPNChallenge ACMEChallengeType = "tls-alpn-01" -) - -type ACMEChallengeStatusType string - -const ( - ACMEChallengePending ACMEChallengeStatusType = "pending" - ACMEChallengeProcessing ACMEChallengeStatusType = "processing" - ACMEChallengeValid ACMEChallengeStatusType = "valid" - ACMEChallengeInvalid ACMEChallengeStatusType = "invalid" -) - -type ACMEChallenge struct { - Type ACMEChallengeType `json:"type"` - Status ACMEChallengeStatusType `json:"status"` - Validated string `json:"validated,optional"` - Error map[string]interface{} `json:"error,optional"` - ChallengeFields map[string]interface{} `json:"challenge_fields"` -} - -func (ac *ACMEChallenge) NetworkMarshal(acmeCtx *acmeContext, authId string) map[string]interface{} { - resp := map[string]interface{}{ - "type": ac.Type, - "url": buildChallengeUrl(acmeCtx, authId, string(ac.Type)), - "status": ac.Status, - } - - if ac.Validated != "" { - resp["validated"] = ac.Validated - } - - if len(ac.Error) > 0 { - resp["error"] = ac.Error - } - - for field, value := range ac.ChallengeFields { - resp[field] = value - } - - return resp -} - -func buildChallengeUrl(acmeCtx *acmeContext, authId, challengeType string) string { - return acmeCtx.baseUrl.JoinPath("/challenge/", authId, challengeType).String() -} - -type ACMEAuthorization struct { - Id string `json:"id"` - AccountId string `json:"account_id"` - - Identifier *ACMEIdentifier `json:"identifier"` - Status ACMEAuthorizationStatusType `json:"status"` - - // Per RFC 8555 Section 7.1.4. Authorization Objects: - // - // > This field is REQUIRED for objects with "valid" in the "status" - // > field. 
- Expires string `json:"expires,optional"` - - Challenges []*ACMEChallenge `json:"challenges"` - Wildcard bool `json:"wildcard"` -} - -func (aa *ACMEAuthorization) GetExpires() (time.Time, error) { - if aa.Expires == "" { - return time.Time{}, nil - } - - return time.Parse(time.RFC3339, aa.Expires) -} - -func (aa *ACMEAuthorization) NetworkMarshal(acmeCtx *acmeContext) map[string]interface{} { - resp := map[string]interface{}{ - "identifier": aa.Identifier.NetworkMarshal( /* use value, not original value */ false), - "status": aa.Status, - "wildcard": aa.Wildcard, - } - - if aa.Expires != "" { - resp["expires"] = aa.Expires - } - - if len(aa.Challenges) > 0 { - challenges := []map[string]interface{}{} - for _, challenge := range aa.Challenges { - challenges = append(challenges, challenge.NetworkMarshal(acmeCtx, aa.Id)) - } - resp["challenges"] = challenges - } - - return resp -} diff --git a/builtin/logical/pki/acme_billing.go b/builtin/logical/pki/acme_billing.go deleted file mode 100644 index 642e0f4fcdd381..00000000000000 --- a/builtin/logical/pki/acme_billing.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "fmt" - - "github.com/hashicorp/vault/sdk/logical" -) - -func (b *backend) doTrackBilling(ctx context.Context, identifiers []*ACMEIdentifier) error { - billingView, ok := b.System().(logical.ACMEBillingSystemView) - if !ok { - return fmt.Errorf("failed to perform cast to ACME billing system view interface") - } - - var realized []string - for _, identifier := range identifiers { - realized = append(realized, fmt.Sprintf("%s/%s", identifier.Type, identifier.OriginalValue)) - } - - return billingView.CreateActivityCountEventForIdentifiers(ctx, realized) -} diff --git a/builtin/logical/pki/acme_billing_test.go b/builtin/logical/pki/acme_billing_test.go deleted file mode 100644 index 3a240f1875d6a0..00000000000000 --- a/builtin/logical/pki/acme_billing_test.go +++ /dev/null @@ -1,318 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "crypto/x509" - "crypto/x509/pkix" - "encoding/json" - "strings" - "testing" - "time" - - "golang.org/x/crypto/acme" - - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/builtin/logical/pki/dnstest" - "github.com/hashicorp/vault/helper/constants" - "github.com/hashicorp/vault/helper/timeutil" - "github.com/hashicorp/vault/vault" - "github.com/hashicorp/vault/vault/activity" - - "github.com/stretchr/testify/require" -) - -// TestACMEBilling is a basic test that will validate client counts created via ACME workflows. -func TestACMEBilling(t *testing.T) { - t.Parallel() - timeutil.SkipAtEndOfMonth(t) - - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - dns := dnstest.SetupResolver(t, "dadgarcorp.com") - defer dns.Cleanup() - - // Enable additional mounts. - setupAcmeBackendOnClusterAtPath(t, cluster, client, "pki2") - setupAcmeBackendOnClusterAtPath(t, cluster, client, "ns1/pki") - setupAcmeBackendOnClusterAtPath(t, cluster, client, "ns2/pki") - - // Enable custom DNS resolver for testing. 
- for _, mount := range []string{"pki", "pki2", "ns1/pki", "ns2/pki"} { - _, err := client.Logical().Write(mount+"/config/acme", map[string]interface{}{ - "dns_resolver": dns.GetLocalAddr(), - }) - require.NoError(t, err, "failed to set local dns resolver address for testing on mount: "+mount) - } - - // Enable client counting. - _, err := client.Logical().Write("/sys/internal/counters/config", map[string]interface{}{ - "enabled": "enable", - }) - require.NoError(t, err, "failed to enable client counting") - - // Setup ACME clients. We refresh account keys each time for consistency. - acmeClientPKI := getAcmeClientForCluster(t, cluster, "/v1/pki/acme/", nil) - acmeClientPKI2 := getAcmeClientForCluster(t, cluster, "/v1/pki2/acme/", nil) - acmeClientPKINS1 := getAcmeClientForCluster(t, cluster, "/v1/ns1/pki/acme/", nil) - acmeClientPKINS2 := getAcmeClientForCluster(t, cluster, "/v1/ns2/pki/acme/", nil) - - // Get our initial count. - expectedCount := validateClientCount(t, client, "", -1, "initial fetch") - - // Unique identifier: should increase by one. - doACMEForDomainWithDNS(t, dns, acmeClientPKI, []string{"dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "pki", expectedCount+1, "new certificate") - - // Different identifier; should increase by one. - doACMEForDomainWithDNS(t, dns, acmeClientPKI, []string{"example.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "pki", expectedCount+1, "new certificate") - - // While same identifiers, used together and so thus are unique; increase by one. - doACMEForDomainWithDNS(t, dns, acmeClientPKI, []string{"example.dadgarcorp.com", "dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "pki", expectedCount+1, "new certificate") - - // Same identifiers in different order are not unique; keep the same. - doACMEForDomainWithDNS(t, dns, acmeClientPKI, []string{"dadgarcorp.com", "example.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "pki", expectedCount, "different order; same identifiers") - - // Using a different mount shouldn't affect counts. - doACMEForDomainWithDNS(t, dns, acmeClientPKI2, []string{"dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "", expectedCount, "different mount; same identifiers") - - // But using a different identifier should. - doACMEForDomainWithDNS(t, dns, acmeClientPKI2, []string{"pki2.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "pki2", expectedCount+1, "different mount with different identifiers") - - // A new identifier in a unique namespace will affect results. - doACMEForDomainWithDNS(t, dns, acmeClientPKINS1, []string{"unique.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "ns1/pki", expectedCount+1, "unique identifier in a namespace") - - // But in a different namespace with the existing identifier will not. - doACMEForDomainWithDNS(t, dns, acmeClientPKINS2, []string{"unique.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "", expectedCount, "existing identifier in a namespace") - doACMEForDomainWithDNS(t, dns, acmeClientPKI2, []string{"unique.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "", expectedCount, "existing identifier outside of a namespace") - - // Creating a unique identifier in a namespace with a mount with the - // same name as another namespace should increase counts as well. 
- doACMEForDomainWithDNS(t, dns, acmeClientPKINS2, []string{"very-unique.dadgarcorp.com"}) - expectedCount = validateClientCount(t, client, "ns2/pki", expectedCount+1, "unique identifier in a different namespace") - - // Check the current fragment - fragment := cluster.Cores[0].Core.ResetActivityLog()[0] - if fragment == nil { - t.Fatal("no fragment created") - } - validateAcmeClientTypes(t, fragment, expectedCount) -} - -func validateAcmeClientTypes(t *testing.T, fragment *activity.LogFragment, expectedCount int64) { - t.Helper() - if int64(len(fragment.Clients)) != expectedCount { - t.Fatalf("bad number of entities, expected %v: got %v, entities are: %v", expectedCount, len(fragment.Clients), fragment.Clients) - } - - for _, ac := range fragment.Clients { - if ac.ClientType != vault.ACMEActivityType { - t.Fatalf("Couldn't find expected '%v' client_type in %v", vault.ACMEActivityType, fragment.Clients) - } - } -} - -func validateClientCount(t *testing.T, client *api.Client, mount string, expected int64, message string) int64 { - resp, err := client.Logical().Read("/sys/internal/counters/activity/monthly") - require.NoError(t, err, "failed to fetch client count values") - t.Logf("got client count numbers: %v", resp) - - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.Contains(t, resp.Data, "non_entity_clients") - require.Contains(t, resp.Data, "months") - - rawCount := resp.Data["non_entity_clients"].(json.Number) - count, err := rawCount.Int64() - require.NoError(t, err, "failed to parse number as int64: "+rawCount.String()) - - if expected != -1 { - require.Equal(t, expected, count, "value of client counts did not match expectations: "+message) - } - - if mount == "" { - return count - } - - months := resp.Data["months"].([]interface{}) - if len(months) > 1 { - t.Fatalf("running across a month boundary despite using SkipAtEndOfMonth(...); rerun test from start fully in the next month instead") - } - - require.Equal(t, 1, len(months), "expected only a single month when running this test") - - monthlyInfo := months[0].(map[string]interface{}) - - // Validate this month's aggregate counts match the overall value. - require.Contains(t, monthlyInfo, "counts", "expected monthly info to contain a count key") - monthlyCounts := monthlyInfo["counts"].(map[string]interface{}) - require.Contains(t, monthlyCounts, "non_entity_clients", "expected month[0].counts to contain a non_entity_clients key") - monthlyCountNonEntityRaw := monthlyCounts["non_entity_clients"].(json.Number) - monthlyCountNonEntity, err := monthlyCountNonEntityRaw.Int64() - require.NoError(t, err, "failed to parse number as int64: "+monthlyCountNonEntityRaw.String()) - require.Equal(t, count, monthlyCountNonEntity, "expected equal values for non entity client counts") - - // Validate this mount's namespace is included in the namespaces list, - // if this is enterprise. Otherwise, if its OSS or we don't have a - // namespace, we default to the value root. 
- mountNamespace := "" - mountPath := mount + "/" - if constants.IsEnterprise && strings.Contains(mount, "/") { - pieces := strings.Split(mount, "/") - require.Equal(t, 2, len(pieces), "we do not support nested namespaces in this test") - mountNamespace = pieces[0] + "/" - mountPath = pieces[1] + "/" - } - - require.Contains(t, monthlyInfo, "namespaces", "expected monthly info to contain a namespaces key") - monthlyNamespaces := monthlyInfo["namespaces"].([]interface{}) - foundNamespace := false - for index, namespaceRaw := range monthlyNamespaces { - namespace := namespaceRaw.(map[string]interface{}) - require.Contains(t, namespace, "namespace_path", "expected monthly.namespaces[%v] to contain a namespace_path key", index) - namespacePath := namespace["namespace_path"].(string) - - if namespacePath != mountNamespace { - t.Logf("skipping non-matching namespace %v: %v != %v / %v", index, namespacePath, mountNamespace, namespace) - continue - } - - foundNamespace = true - - // This namespace must have a non-empty aggregate non-entity count. - require.Contains(t, namespace, "counts", "expected monthly.namespaces[%v] to contain a counts key", index) - namespaceCounts := namespace["counts"].(map[string]interface{}) - require.Contains(t, namespaceCounts, "non_entity_clients", "expected namespace counts to contain a non_entity_clients key") - namespaceCountNonEntityRaw := namespaceCounts["non_entity_clients"].(json.Number) - namespaceCountNonEntity, err := namespaceCountNonEntityRaw.Int64() - require.NoError(t, err, "failed to parse number as int64: "+namespaceCountNonEntityRaw.String()) - require.Greater(t, namespaceCountNonEntity, int64(0), "expected at least one non-entity client count value in the namespace") - - require.Contains(t, namespace, "mounts", "expected monthly.namespaces[%v] to contain a mounts key", index) - namespaceMounts := namespace["mounts"].([]interface{}) - foundMount := false - for mountIndex, mountRaw := range namespaceMounts { - mountInfo := mountRaw.(map[string]interface{}) - require.Contains(t, mountInfo, "mount_path", "expected monthly.namespaces[%v].mounts[%v] to contain a mount_path key", index, mountIndex) - mountInfoPath := mountInfo["mount_path"].(string) - if mountPath != mountInfoPath { - t.Logf("skipping non-matching mount path %v in namespace %v: %v != %v / %v of %v", mountIndex, index, mountPath, mountInfoPath, mountInfo, namespace) - continue - } - - foundMount = true - - // This mount must also have a non-empty non-entity client count. 
- require.Contains(t, mountInfo, "counts", "expected monthly.namespaces[%v].mounts[%v] to contain a counts key", index, mountIndex) - mountCounts := mountInfo["counts"].(map[string]interface{}) - require.Contains(t, mountCounts, "non_entity_clients", "expected mount counts to contain a non_entity_clients key") - mountCountNonEntityRaw := mountCounts["non_entity_clients"].(json.Number) - mountCountNonEntity, err := mountCountNonEntityRaw.Int64() - require.NoError(t, err, "failed to parse number as int64: "+mountCountNonEntityRaw.String()) - require.Greater(t, mountCountNonEntity, int64(0), "expected at least one non-entity client count value in the mount") - } - - require.True(t, foundMount, "expected to find the mount "+mountPath+" in the list of mounts for namespace, but did not") - } - - require.True(t, foundNamespace, "expected to find the namespace "+mountNamespace+" in the list of namespaces, but did not") - - return count -} - -func doACMEForDomainWithDNS(t *testing.T, dns *dnstest.TestServer, acmeClient *acme.Client, domains []string) *x509.Certificate { - cr := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: domains[0]}, - DNSNames: domains, - } - - accountKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed to generate account key") - acmeClient.Key = accountKey - - testCtx, cancelFunc := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancelFunc() - - // Register the client. - _, err = acmeClient.Register(testCtx, &acme.Account{Contact: []string{"mailto:ipsans@dadgarcorp.com"}}, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create the Order - var orderIdentifiers []acme.AuthzID - for _, domain := range domains { - orderIdentifiers = append(orderIdentifiers, acme.AuthzID{Type: "dns", Value: domain}) - } - order, err := acmeClient.AuthorizeOrder(testCtx, orderIdentifiers) - require.NoError(t, err, "failed creating ACME order") - - // Fetch its authorizations. - var auths []*acme.Authorization - for _, authUrl := range order.AuthzURLs { - authorization, err := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, err, "failed to lookup authorization at url: %s", authUrl) - auths = append(auths, authorization) - } - - // For each dns-01 challenge, place the record in the associated DNS resolver. - var challengesToAccept []*acme.Challenge - for _, auth := range auths { - for _, challenge := range auth.Challenges { - if challenge.Status != acme.StatusPending { - t.Logf("ignoring challenge not in status pending: %v", challenge) - continue - } - - if challenge.Type == "dns-01" { - challengeBody, err := acmeClient.DNS01ChallengeRecord(challenge.Token) - require.NoError(t, err, "failed generating challenge response") - - dns.AddRecord("_acme-challenge."+auth.Identifier.Value, "TXT", challengeBody) - defer dns.RemoveRecord("_acme-challenge."+auth.Identifier.Value, "TXT", challengeBody) - - require.NoError(t, err, "failed setting DNS record") - - challengesToAccept = append(challengesToAccept, challenge) - } - } - } - - dns.PushConfig() - require.GreaterOrEqual(t, len(challengesToAccept), 1, "Need at least one challenge, got none") - - // Tell the ACME server, that they can now validate those challenges. - for _, challenge := range challengesToAccept { - _, err = acmeClient.Accept(testCtx, challenge) - require.NoError(t, err, "failed to accept challenge: %v", challenge) - } - - // Wait for the order/challenges to be validated. 
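[Editor's note, illustrative sketch only] The deleted test helper above answers dns-01 challenges with golang.org/x/crypto/acme: it derives a TXT value per challenge token and publishes it under the _acme-challenge label before accepting the challenge. A minimal standalone sketch of just that derivation; the function name and domain handling here are placeholders, and the client's account Key must already be set, as it is in the test.

package example

import (
	"golang.org/x/crypto/acme"
)

// dns01TXTRecord derives the TXT record a dns-01 responder must publish,
// using the same DNS01ChallengeRecord call the deleted helper makes before
// dns.AddRecord.
func dns01TXTRecord(client *acme.Client, token, domain string) (name, value string, err error) {
	value, err = client.DNS01ChallengeRecord(token)
	if err != nil {
		return "", "", err
	}
	return "_acme-challenge." + domain, value, nil
}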
- _, err = acmeClient.WaitOrder(testCtx, order.URI) - require.NoError(t, err, "failed waiting for order to be ready") - - // Create/sign the CSR and ask ACME server to sign it returning us the final certificate - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - csr, err := x509.CreateCertificateRequest(rand.Reader, cr, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, false) - require.NoError(t, err, "failed to get a certificate back from ACME") - - acmeCert, err := x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert bytes") - - return acmeCert -} diff --git a/builtin/logical/pki/acme_challenge_engine.go b/builtin/logical/pki/acme_challenge_engine.go deleted file mode 100644 index 5103a0376e7e9f..00000000000000 --- a/builtin/logical/pki/acme_challenge_engine.go +++ /dev/null @@ -1,535 +0,0 @@ -package pki - -import ( - "container/list" - "context" - "fmt" - "sync" - "time" - - "github.com/hashicorp/vault/sdk/logical" -) - -var MaxChallengeTimeout = 1 * time.Minute - -const MaxRetryAttempts = 5 - -const ChallengeAttemptFailedMsg = "this may occur if the validation target was misconfigured: check that challenge responses are available at the required locations and retry." - -type ChallengeValidation struct { - // Account KID that this validation attempt is recorded under. - Account string `json:"account"` - - // The authorization ID that this validation attempt is for. - Authorization string `json:"authorization"` - ChallengeType ACMEChallengeType `json:"challenge_type"` - - // The token of this challenge and the JWS thumbprint of the account - // we're validating against. - Token string `json:"token"` - Thumbprint string `json:"thumbprint"` - - Initiated time.Time `json:"initiated"` - FirstValidation time.Time `json:"first_validation,omitempty"` - RetryCount int `json:"retry_count,omitempty"` - LastRetry time.Time `json:"last_retry,omitempty"` - RetryAfter time.Time `json:"retry_after,omitempty"` -} - -type ChallengeQueueEntry struct { - Identifier string - RetryAfter time.Time -} - -type ACMEChallengeEngine struct { - NumWorkers int - - ValidationLock sync.Mutex - NewValidation chan string - Closing chan struct{} - Validations *list.List -} - -func NewACMEChallengeEngine() *ACMEChallengeEngine { - ace := &ACMEChallengeEngine{} - ace.NewValidation = make(chan string, 1) - ace.Closing = make(chan struct{}, 1) - ace.Validations = list.New() - ace.NumWorkers = 5 - - return ace -} - -func (ace *ACMEChallengeEngine) LoadFromStorage(b *backend, sc *storageContext) error { - items, err := sc.Storage.List(sc.Context, acmeValidationPrefix) - if err != nil { - return fmt.Errorf("failed loading list of validations from disk: %w", err) - } - - ace.ValidationLock.Lock() - defer ace.ValidationLock.Unlock() - - // Add them to our queue of validations to work through later. 
- foundExistingValidations := false - for _, item := range items { - ace.Validations.PushBack(&ChallengeQueueEntry{ - Identifier: item, - }) - foundExistingValidations = true - } - - if foundExistingValidations { - ace.NewValidation <- "existing" - } - - return nil -} - -func (ace *ACMEChallengeEngine) Run(b *backend, state *acmeState, sc *storageContext) { - // We load the existing ACME challenges within the Run thread to avoid - // delaying the PKI mount initialization - b.Logger().Debug("Loading existing challenge validations on disk") - err := ace.LoadFromStorage(b, sc) - if err != nil { - b.Logger().Error("failed loading existing ACME challenge validations:", "err", err) - } - - for true { - // err == nil on shutdown. - b.Logger().Debug("Starting ACME challenge validation engine") - err := ace._run(b, state) - if err != nil { - b.Logger().Error("Got unexpected error from ACME challenge validation engine", "err", err) - time.Sleep(1 * time.Second) - continue - } - break - } -} - -func (ace *ACMEChallengeEngine) _run(b *backend, state *acmeState) error { - // This runner uses a background context for storage operations: we don't - // want to tie it to a inbound request and we don't want to set a time - // limit, so create a fresh background context. - runnerSC := b.makeStorageContext(context.Background(), b.storage) - - // We want at most a certain number of workers operating to verify - // challenges. - var finishedWorkersChannels []chan bool - for true { - // Wait until we've got more work to do. - select { - case <-ace.Closing: - b.Logger().Debug("shutting down ACME challenge validation engine") - return nil - case <-ace.NewValidation: - } - - // First try to reap any finished workers. Read from their channels - // and if not finished yet, add to a fresh slice. - var newFinishedWorkersChannels []chan bool - for _, channel := range finishedWorkersChannels { - select { - case <-channel: - default: - // This channel had not been written to, indicating that the - // worker had not yet finished. - newFinishedWorkersChannels = append(newFinishedWorkersChannels, channel) - } - } - finishedWorkersChannels = newFinishedWorkersChannels - - // If we have space to take on another work item, do so. - firstIdentifier := "" - startedWork := false - now := time.Now() - for len(finishedWorkersChannels) < ace.NumWorkers { - var task *ChallengeQueueEntry - - // Find our next work item. We do all of these operations - // while holding the queue lock, hence some repeated checks - // afterwards. Out of this, we get a candidate task, using - // element == nil as a sentinel for breaking our parent - // loop. - ace.ValidationLock.Lock() - element := ace.Validations.Front() - if element != nil { - ace.Validations.Remove(element) - task = element.Value.(*ChallengeQueueEntry) - if !task.RetryAfter.IsZero() && now.Before(task.RetryAfter) { - // We cannot work on this element yet; remove it to - // the back of the queue. This allows us to potentially - // select the next item in the next iteration. - ace.Validations.PushBack(task) - } - - if firstIdentifier != "" && task.Identifier == firstIdentifier { - // We found and rejected this element before; exit the - // loop by "claiming" we didn't find any work. - element = nil - } else if firstIdentifier == "" { - firstIdentifier = task.Identifier - } - } - ace.ValidationLock.Unlock() - if element == nil { - // There was no more work to do to fill up the queue; exit - // this loop. 
- break - } - if now.Before(task.RetryAfter) { - // Here, while we found an element, we didn't want to - // completely exit the loop (perhaps it was our first time - // finding a work order), so retry without modifying - // firstIdentifier. - continue - } - - config, err := state.getConfigWithUpdate(runnerSC) - if err != nil { - return fmt.Errorf("failed fetching ACME configuration: %w", err) - } - - // Since this work item was valid, we won't expect to see it in - // the validation queue again until it is executed. Here, we - // want to avoid infinite looping above (if we removed the one - // valid item and the remainder are all not immediately - // actionable). At the worst, we'll spend a little more time - // looping through the queue until we hit a repeat. - firstIdentifier = "" - - // Here, we got a piece of work that is ready to check; create a - // channel and a new go routine and run it. Note that this still - // could have a RetryAfter date we're not aware of (e.g., if the - // cluster restarted as we do not read the entries there). - channel := make(chan bool, 1) - go ace.VerifyChallenge(runnerSC, task.Identifier, channel, config) - finishedWorkersChannels = append(finishedWorkersChannels, channel) - startedWork = true - } - - // If we have no more capacity for work, we should pause a little to - // let the system catch up. Additionally, if we only had - // non-actionable work items, we should pause until some time has - // elapsed: not too much that we potentially starve any new incoming - // items from validation, but not too short that we cause a busy loop. - if len(finishedWorkersChannels) == ace.NumWorkers || !startedWork { - time.Sleep(100 * time.Millisecond) - } - - // Lastly, if we have more work to do, re-trigger ourselves. - ace.ValidationLock.Lock() - if ace.Validations.Front() != nil { - select { - case ace.NewValidation <- "retry": - default: - } - } - ace.ValidationLock.Unlock() - } - - return fmt.Errorf("unexpectedly exited from ACMEChallengeEngine._run()") -} - -func (ace *ACMEChallengeEngine) AcceptChallenge(sc *storageContext, account string, authz *ACMEAuthorization, challenge *ACMEChallenge, thumbprint string) error { - name := authz.Id + "-" + string(challenge.Type) - path := acmeValidationPrefix + name - - entry, err := sc.Storage.Get(sc.Context, path) - if err == nil && entry != nil { - // Challenge already in the queue; exit without re-adding it. - return nil - } - - if authz.Status != ACMEAuthorizationPending { - return fmt.Errorf("%w: cannot accept already validated authorization %v (%v)", ErrMalformed, authz.Id, authz.Status) - } - - for _, otherChallenge := range authz.Challenges { - // We assume within an authorization we won't have multiple challenges of the same challenge type - // and we want to limit a single challenge being in a processing state to avoid race conditions - // failing one challenge and passing another. 
- if otherChallenge.Type != challenge.Type && otherChallenge.Status != ACMEChallengePending { - return fmt.Errorf("%w: only a single challenge within an authorization can be accepted (%v) in status %v", ErrMalformed, otherChallenge.Type, otherChallenge.Status) - } - - // The requested challenge can ping us to wake us up, so allow pending and currently processing statuses - if otherChallenge.Status != ACMEChallengePending && otherChallenge.Status != ACMEChallengeProcessing { - return fmt.Errorf("%w: challenge is in invalid state (%v) in authorization %v", ErrMalformed, challenge.Status, authz.Id) - } - } - - token := challenge.ChallengeFields["token"].(string) - - cv := &ChallengeValidation{ - Account: account, - Authorization: authz.Id, - ChallengeType: challenge.Type, - Token: token, - Thumbprint: thumbprint, - Initiated: time.Now(), - } - - json, err := logical.StorageEntryJSON(path, &cv) - if err != nil { - return fmt.Errorf("error creating challenge validation queue entry: %w", err) - } - - if err := sc.Storage.Put(sc.Context, json); err != nil { - return fmt.Errorf("error writing challenge validation entry: %w", err) - } - - if challenge.Status == ACMEChallengePending { - challenge.Status = ACMEChallengeProcessing - - authzPath := getAuthorizationPath(account, authz.Id) - if err := saveAuthorizationAtPath(sc, authzPath, authz); err != nil { - return fmt.Errorf("error saving updated authorization %v: %w", authz.Id, err) - } - } - - ace.ValidationLock.Lock() - defer ace.ValidationLock.Unlock() - ace.Validations.PushBack(&ChallengeQueueEntry{ - Identifier: name, - }) - - select { - case ace.NewValidation <- name: - default: - } - - return nil -} - -func (ace *ACMEChallengeEngine) VerifyChallenge(runnerSc *storageContext, id string, finished chan bool, config *acmeConfigEntry) { - sc, _ /* cancel func */ := runnerSc.WithFreshTimeout(MaxChallengeTimeout) - runnerSc.Backend.Logger().Debug("Starting verification of challenge", "id", id) - - if retry, retryAfter, err := ace._verifyChallenge(sc, id, config); err != nil { - // Because verification of this challenge failed, we need to retry - // it in the future. Log the error and re-add the item to the queue - // to try again later. - sc.Backend.Logger().Error(fmt.Sprintf("ACME validation failed for %v: %v", id, err)) - - if retry { - ace.ValidationLock.Lock() - defer ace.ValidationLock.Unlock() - ace.Validations.PushBack(&ChallengeQueueEntry{ - Identifier: id, - RetryAfter: retryAfter, - }) - - // Let the validator know there's a pending challenge. - select { - case ace.NewValidation <- id: - default: - } - } - - // We're the only producer on this channel and it has a buffer size - // of one element, so it is safe to directly write here. - finished <- true - return - } - - // We're the only producer on this channel and it has a buffer size of one - // element, so it is safe to directly write here. - finished <- false -} - -func (ace *ACMEChallengeEngine) _verifyChallenge(sc *storageContext, id string, config *acmeConfigEntry) (bool, time.Time, error) { - now := time.Now() - path := acmeValidationPrefix + id - challengeEntry, err := sc.Storage.Get(sc.Context, path) - if err != nil { - return true, now, fmt.Errorf("error loading challenge %v: %w", id, err) - } - - if challengeEntry == nil { - // Something must've successfully cleaned up our storage entry from - // under us. Assume we don't need to rerun, else the client will - // trigger us to re-run. 
- err = nil - return ace._verifyChallengeCleanup(sc, err, id) - } - - var cv *ChallengeValidation - if err := challengeEntry.DecodeJSON(&cv); err != nil { - return true, now, fmt.Errorf("error decoding challenge %v: %w", id, err) - } - - if now.Before(cv.RetryAfter) { - return true, cv.RetryAfter, fmt.Errorf("retrying challenge %v too soon", id) - } - - authzPath := getAuthorizationPath(cv.Account, cv.Authorization) - authz, err := loadAuthorizationAtPath(sc, authzPath) - if err != nil { - return true, now, fmt.Errorf("error loading authorization %v/%v for challenge %v: %w", cv.Account, cv.Authorization, id, err) - } - - if authz.Status != ACMEAuthorizationPending { - // Something must've finished up this challenge for us. Assume we - // don't need to rerun and exit instead. - err = nil - return ace._verifyChallengeCleanup(sc, err, id) - } - - var challenge *ACMEChallenge - for _, authzChallenge := range authz.Challenges { - if authzChallenge.Type == cv.ChallengeType { - challenge = authzChallenge - break - } - } - - if challenge == nil { - err = fmt.Errorf("no challenge of type %v in authorization %v/%v for challenge %v", cv.ChallengeType, cv.Account, cv.Authorization, id) - return ace._verifyChallengeCleanup(sc, err, id) - } - - if challenge.Status != ACMEChallengePending && challenge.Status != ACMEChallengeProcessing { - err = fmt.Errorf("challenge is in invalid state %v in authorization %v/%v for challenge %v", challenge.Status, cv.Account, cv.Authorization, id) - return ace._verifyChallengeCleanup(sc, err, id) - } - - var valid bool - switch challenge.Type { - case ACMEHTTPChallenge: - if authz.Identifier.Type != ACMEDNSIdentifier && authz.Identifier.Type != ACMEIPIdentifier { - err = fmt.Errorf("unsupported identifier type for authorization %v/%v in challenge %v: %v", cv.Account, cv.Authorization, id, authz.Identifier.Type) - return ace._verifyChallengeCleanup(sc, err, id) - } - - if authz.Wildcard { - err = fmt.Errorf("unable to validate wildcard authorization %v/%v in challenge %v via http-01 challenge", cv.Account, cv.Authorization, id) - return ace._verifyChallengeCleanup(sc, err, id) - } - - valid, err = ValidateHTTP01Challenge(authz.Identifier.Value, cv.Token, cv.Thumbprint, config) - if err != nil { - err = fmt.Errorf("%w: error validating http-01 challenge %v: %v; %v", ErrIncorrectResponse, id, err, ChallengeAttemptFailedMsg) - return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, err, id) - } - case ACMEDNSChallenge: - if authz.Identifier.Type != ACMEDNSIdentifier { - err = fmt.Errorf("unsupported identifier type for authorization %v/%v in challenge %v: %v", cv.Account, cv.Authorization, id, authz.Identifier.Type) - return ace._verifyChallengeCleanup(sc, err, id) - } - - valid, err = ValidateDNS01Challenge(authz.Identifier.Value, cv.Token, cv.Thumbprint, config) - if err != nil { - err = fmt.Errorf("%w: error validating dns-01 challenge %v: %v; %v", ErrIncorrectResponse, id, err, ChallengeAttemptFailedMsg) - return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, err, id) - } - case ACMEALPNChallenge: - if authz.Identifier.Type != ACMEDNSIdentifier { - err = fmt.Errorf("unsupported identifier type for authorization %v/%v in challenge %v: %v", cv.Account, cv.Authorization, id, authz.Identifier.Type) - return ace._verifyChallengeCleanup(sc, err, id) - } - - if authz.Wildcard { - err = fmt.Errorf("unable to validate wildcard authorization %v/%v in challenge %v via tls-alpn-01 challenge", cv.Account, cv.Authorization, id) - return 
ace._verifyChallengeCleanup(sc, err, id) - } - - valid, err = ValidateTLSALPN01Challenge(authz.Identifier.Value, cv.Token, cv.Thumbprint, config) - if err != nil { - err = fmt.Errorf("%w: error validating tls-alpn-01 challenge %v: %s", ErrIncorrectResponse, id, err.Error()) - return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, err, id) - } - default: - err = fmt.Errorf("unsupported ACME challenge type %v for challenge %v", cv.ChallengeType, id) - return ace._verifyChallengeCleanup(sc, err, id) - } - - if !valid { - err = fmt.Errorf("%w: challenge failed with no additional information", ErrIncorrectResponse) - return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, err, id) - } - - // If we got here, the challenge verification was successful. Update - // the authorization appropriately. - expires := now.Add(15 * 24 * time.Hour) - challenge.Status = ACMEChallengeValid - challenge.Validated = now.Format(time.RFC3339) - challenge.Error = nil - authz.Status = ACMEAuthorizationValid - authz.Expires = expires.Format(time.RFC3339) - - if err := saveAuthorizationAtPath(sc, authzPath, authz); err != nil { - err = fmt.Errorf("error saving updated (validated) authorization %v/%v for challenge %v: %w", cv.Account, cv.Authorization, id, err) - return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, err, id) - } - - return ace._verifyChallengeCleanup(sc, nil, id) -} - -func (ace *ACMEChallengeEngine) _verifyChallengeRetry(sc *storageContext, cv *ChallengeValidation, authzPath string, auth *ACMEAuthorization, challenge *ACMEChallenge, verificationErr error, id string) (bool, time.Time, error) { - now := time.Now() - path := acmeValidationPrefix + id - - if err := updateChallengeStatus(sc, cv, authzPath, auth, challenge, verificationErr); err != nil { - return true, now, err - } - - if cv.RetryCount > MaxRetryAttempts { - err := fmt.Errorf("reached max error attempts for challenge %v: %w", id, verificationErr) - return ace._verifyChallengeCleanup(sc, err, id) - } - - if cv.FirstValidation.IsZero() { - cv.FirstValidation = now - } - cv.RetryCount += 1 - cv.LastRetry = now - cv.RetryAfter = now.Add(time.Duration(cv.RetryCount*5) * time.Second) - - json, jsonErr := logical.StorageEntryJSON(path, cv) - if jsonErr != nil { - return true, now, fmt.Errorf("error persisting updated challenge validation queue entry (error prior to retry, if any: %v): %w", verificationErr, jsonErr) - } - - if putErr := sc.Storage.Put(sc.Context, json); putErr != nil { - return true, now, fmt.Errorf("error writing updated challenge validation entry (error prior to retry, if any: %v): %w", verificationErr, putErr) - } - - if verificationErr != nil { - verificationErr = fmt.Errorf("retrying validation: %w", verificationErr) - } - - return true, cv.RetryAfter, verificationErr -} - -func updateChallengeStatus(sc *storageContext, cv *ChallengeValidation, authzPath string, auth *ACMEAuthorization, challenge *ACMEChallenge, verificationErr error) error { - if verificationErr != nil { - challengeError := TranslateErrorToErrorResponse(verificationErr) - challenge.Error = challengeError.MarshalForStorage() - } - - if cv.RetryCount > MaxRetryAttempts { - challenge.Status = ACMEChallengeInvalid - auth.Status = ACMEAuthorizationInvalid - } - - if err := saveAuthorizationAtPath(sc, authzPath, auth); err != nil { - return fmt.Errorf("error persisting authorization/challenge update: %w", err) - } - return nil -} - -func (ace *ACMEChallengeEngine) _verifyChallengeCleanup(sc *storageContext, err error, id 
string) (bool, time.Time, error) { - now := time.Now() - - // Remove our ChallengeValidation entry only. - if deleteErr := sc.Storage.Delete(sc.Context, acmeValidationPrefix+id); deleteErr != nil { - return true, now.Add(-1 * time.Second), fmt.Errorf("error deleting challenge %v (error prior to cleanup, if any: %v): %w", id, err, deleteErr) - } - - if err != nil { - err = fmt.Errorf("removing challenge validation attempt and not retrying %v; previous error: %w", id, err) - } - - return false, now, err -} diff --git a/builtin/logical/pki/acme_challenges.go b/builtin/logical/pki/acme_challenges.go deleted file mode 100644 index 855035729e97ea..00000000000000 --- a/builtin/logical/pki/acme_challenges.go +++ /dev/null @@ -1,469 +0,0 @@ -package pki - -import ( - "bytes" - "context" - "crypto/sha256" - "crypto/tls" - "crypto/x509" - "encoding/asn1" - "encoding/base64" - "fmt" - "io" - "net" - "net/http" - "strings" - "time" -) - -const ( - DNSChallengePrefix = "_acme-challenge." - ALPNProtocol = "acme-tls/1" -) - -// While this should be a constant, there's no way to do a low-level test of -// ValidateTLSALPN01Challenge without spinning up a complicated Docker -// instance to build a custom responder. Because we already have a local -// toolchain, it is far easier to drive this through Go tests with a custom -// (high) port, rather than requiring permission to bind to port 443 (root-run -// tests are even worse). -var ALPNPort = "443" - -// OID of the acmeIdentifier X.509 Certificate Extension. -var OIDACMEIdentifier = asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1, 31} - -// ValidateKeyAuthorization validates that the given keyAuthz from a challenge -// matches our expectation, returning (true, nil) if so, or (false, err) if -// not. -func ValidateKeyAuthorization(keyAuthz string, token string, thumbprint string) (bool, error) { - parts := strings.Split(keyAuthz, ".") - if len(parts) != 2 { - return false, fmt.Errorf("invalid authorization: got %v parts, expected 2", len(parts)) - } - - tokenPart := parts[0] - thumbprintPart := parts[1] - - if token != tokenPart || thumbprint != thumbprintPart { - return false, fmt.Errorf("key authorization was invalid") - } - - return true, nil -} - -// ValidateSHA256KeyAuthorization validates that the given keyAuthz from a -// challenge matches our expectation, returning (true, nil) if so, or -// (false, err) if not. -// -// This is for use with DNS challenges, which require -func ValidateSHA256KeyAuthorization(keyAuthz string, token string, thumbprint string) (bool, error) { - authzContents := token + "." 
+ thumbprint - checksum := sha256.Sum256([]byte(authzContents)) - expectedAuthz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - if keyAuthz != expectedAuthz { - return false, fmt.Errorf("sha256 key authorization was invalid") - } - - return true, nil -} - -func buildResolver(config *acmeConfigEntry) (*net.Resolver, error) { - if len(config.DNSResolver) == 0 { - return net.DefaultResolver, nil - } - - return &net.Resolver{ - PreferGo: true, - StrictErrors: false, - Dial: func(ctx context.Context, network, address string) (net.Conn, error) { - d := net.Dialer{ - Timeout: 10 * time.Second, - } - return d.DialContext(ctx, network, config.DNSResolver) - }, - }, nil -} - -func buildDialerConfig(config *acmeConfigEntry) (*net.Dialer, error) { - resolver, err := buildResolver(config) - if err != nil { - return nil, fmt.Errorf("failed to build resolver: %w", err) - } - - return &net.Dialer{ - Timeout: 10 * time.Second, - KeepAlive: -1 * time.Second, - Resolver: resolver, - }, nil -} - -// Validates a given ACME http-01 challenge against the specified domain, -// per RFC 8555. -// -// We attempt to be defensive here against timeouts, extra redirects, &c. -func ValidateHTTP01Challenge(domain string, token string, thumbprint string, config *acmeConfigEntry) (bool, error) { - path := "http://" + domain + "/.well-known/acme-challenge/" + token - dialer, err := buildDialerConfig(config) - if err != nil { - return false, fmt.Errorf("failed to build dialer: %w", err) - } - - transport := &http.Transport{ - // Only a single request is sent to this server as we do not do any - // batching of validation attempts. There is no need to do an HTTP - // KeepAlive as a result. - DisableKeepAlives: true, - MaxIdleConns: 1, - MaxIdleConnsPerHost: 1, - MaxConnsPerHost: 1, - IdleConnTimeout: 1 * time.Second, - - // We'd rather timeout and re-attempt validation later than hang - // too many validators waiting for slow hosts. - DialContext: dialer.DialContext, - ResponseHeaderTimeout: 10 * time.Second, - } - - maxRedirects := 10 - urlLength := 2000 - - client := &http.Client{ - Transport: transport, - CheckRedirect: func(req *http.Request, via []*http.Request) error { - if len(via)+1 >= maxRedirects { - return fmt.Errorf("http-01: too many redirects: %v", len(via)+1) - } - - reqUrlLen := len(req.URL.String()) - if reqUrlLen > urlLength { - return fmt.Errorf("http-01: redirect url length too long: %v", reqUrlLen) - } - - return nil - }, - } - - resp, err := client.Get(path) - if err != nil { - return false, fmt.Errorf("http-01: failed to fetch path %v: %w", path, err) - } - - // We provision a buffer which allows for a variable size challenge, some - // whitespace, and a detection gap for too long of a message. - minExpected := len(token) + 1 + len(thumbprint) - maxExpected := 512 - - defer resp.Body.Close() - - // Attempt to read the body, but don't do so infinitely. - body, err := io.ReadAll(io.LimitReader(resp.Body, int64(maxExpected+1))) - if err != nil { - return false, fmt.Errorf("http-01: unexpected error while reading body: %w", err) - } - - if len(body) > maxExpected { - return false, fmt.Errorf("http-01: response too large: received %v > %v bytes", len(body), maxExpected) - } - - if len(body) < minExpected { - return false, fmt.Errorf("http-01: response too small: received %v < %v bytes", len(body), minExpected) - } - - // Per RFC 8555 Section 8.3. HTTP Challenge: - // - // > The server SHOULD ignore whitespace characters at the end of the body. 
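// Standalone illustrative sketch (not Vault code): the two response values the
// validators in acme_challenges.go expect. http-01 serves the raw
// "token.thumbprint" key authorization over HTTP, while dns-01 publishes the
// base64url-encoded SHA-256 digest of that same string in a TXT record, which
// is what ValidateSHA256KeyAuthorization recomputes. The sample token and
// thumbprint values below are made up.
package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// keyAuthorization builds the RFC 8555 Section 8.1 value: token || "." || thumbprint.
func keyAuthorization(token, thumbprint string) string {
	return token + "." + thumbprint
}

// dns01TXTValue is the digest form used for dns-01 (RFC 8555 Section 8.4).
func dns01TXTValue(token, thumbprint string) string {
	sum := sha256.Sum256([]byte(keyAuthorization(token, thumbprint)))
	return base64.RawURLEncoding.EncodeToString(sum[:])
}

func main() {
	token, thumbprint := "example-token", "example-thumbprint"
	fmt.Println("http-01 body:    ", keyAuthorization(token, thumbprint))
	fmt.Println("dns-01 TXT value:", dns01TXTValue(token, thumbprint))
}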
- keyAuthz := string(body) - keyAuthz = strings.TrimSpace(keyAuthz) - - // If we got here, we got no non-EOF error while reading. Try to validate - // the token because we're bounded by a reasonable amount of length. - return ValidateKeyAuthorization(keyAuthz, token, thumbprint) -} - -func ValidateDNS01Challenge(domain string, token string, thumbprint string, config *acmeConfigEntry) (bool, error) { - // Here, domain is the value from the post-wildcard-processed identifier. - // Per RFC 8555, no difference in validation occurs if a wildcard entry - // is requested or if a non-wildcard entry is requested. - // - // XXX: In this case the DNS server is operator controlled and is assumed - // to be less malicious so the default resolver is used. In the future, - // we'll want to use net.Resolver for two reasons: - // - // 1. To control the actual resolver via ACME configuration, - // 2. To use a context to set stricter timeout limits. - resolver, err := buildResolver(config) - if err != nil { - return false, fmt.Errorf("failed to build resolver: %w", err) - } - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - name := DNSChallengePrefix + domain - results, err := resolver.LookupTXT(ctx, name) - if err != nil { - return false, fmt.Errorf("dns-01: failed to lookup TXT records for domain (%v) via resolver %v: %w", name, config.DNSResolver, err) - } - - for _, keyAuthz := range results { - ok, _ := ValidateSHA256KeyAuthorization(keyAuthz, token, thumbprint) - if ok { - return true, nil - } - } - - return false, fmt.Errorf("dns-01: challenge failed against %v records", len(results)) -} - -func ValidateTLSALPN01Challenge(domain string, token string, thumbprint string, config *acmeConfigEntry) (bool, error) { - // This RFC is defined in RFC 8737 Automated Certificate Management - // Environment (ACME) TLS Application‑Layer Protocol Negotiation - // (ALPN) Challenge Extension. - // - // This is conceptually similar to ValidateHTTP01Challenge, but - // uses a TLS connection on port 443 with the specified ALPN - // protocol. - - cfg := &tls.Config{ - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge, the name of the negotiated - // protocol is "acme-tls/1". - NextProtos: []string{ALPNProtocol}, - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > ... and an SNI extension containing only the domain name - // > being validated during the TLS handshake. - // - // According to the Go docs, setting this option (even though - // InsecureSkipVerify=true is also specified), allows us to - // set the SNI extension to this value. - ServerName: domain, - - VerifyConnection: func(connState tls.ConnectionState) error { - // We initiated a fresh connection with no session tickets; - // even if we did have a session ticket, we do not wish to - // use it. Verify that the server has not inadvertently - // reused connections between validation attempts or something. - if connState.DidResume { - return fmt.Errorf("server under test incorrectly reported that handshake was resumed when no session cache was provided; refusing to continue") - } - - // Per RFC 8737 Section 3. 
TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > The ACME server verifies that during the TLS handshake the - // > application-layer protocol "acme-tls/1" was successfully - // > negotiated (and that the ALPN extension contained only the - // > value "acme-tls/1"). - if connState.NegotiatedProtocol != ALPNProtocol { - return fmt.Errorf("server under test negotiated unexpected ALPN protocol %v", connState.NegotiatedProtocol) - } - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > and that the certificate returned - // - // Because this certificate MUST be self-signed (per earlier - // statement in RFC 8737 Section 3), there is no point in sending - // more than one certificate, and so we will err early here if - // we got more than one. - if len(connState.PeerCertificates) > 1 { - return fmt.Errorf("server under test returned multiple (%v) certificates when we expected only one", len(connState.PeerCertificates)) - } - cert := connState.PeerCertificates[0] - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > The client prepares for validation by constructing a - // > self-signed certificate that MUST contain an acmeIdentifier - // > extension and a subjectAlternativeName extension [RFC5280]. - // - // Verify that this is a self-signed certificate that isn't signed - // by another certificate (i.e., with the same key material but - // different issuer). - if err := cert.CheckSignatureFrom(cert); err != nil { - return fmt.Errorf("server under test returned a non-self-signed certificate: %w", err) - } - if !bytes.Equal(cert.RawSubject, cert.RawIssuer) { - return fmt.Errorf("server under test returned a non-self-signed certificate: invalid subject (%v) <-> issuer (%v) match", cert.Subject.String(), cert.Issuer.String()) - } - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > The subjectAlternativeName extension MUST contain a single - // > dNSName entry where the value is the domain name being - // > validated. - // - // TODO: this does not validate that there are not other SANs - // with unknown (to Go) OIDs. - if len(cert.DNSNames) != 1 || len(cert.EmailAddresses) > 0 || len(cert.IPAddresses) > 0 || len(cert.URIs) > 0 { - return fmt.Errorf("server under test returned a certificate with incorrect SANs") - } - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > The comparison of dNSNames MUST be case insensitive - // > [RFC4343]. Note that as ACME doesn't support Unicode - // > identifiers, all dNSNames MUST be encoded using the rules - // > of [RFC3492]. - if !strings.EqualFold(cert.DNSNames[0], domain) { - return fmt.Errorf("server under test returned a certificate with unexpected identifier: %v", cert.DNSNames[0]) - } - - // Per above, verify that the acmeIdentifier extension is present - // exactly once and has the correct value. - var foundACMEId bool - for _, ext := range cert.Extensions { - if !ext.Id.Equal(OIDACMEIdentifier) { - continue - } - - // There must be only a single ACME extension. - if foundACMEId { - return fmt.Errorf("server under test returned a certificate with multiple acmeIdentifier extensions") - } - foundACMEId = true - - // Per RFC 8737 Section 3. 
TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > a critical acmeIdentifier extension - if !ext.Critical { - return fmt.Errorf("server under test returned a certificate with an acmeIdentifier extension marked non-Critical") - } - - keyAuthz := string(ext.Value) - ok, err := ValidateSHA256KeyAuthorization(keyAuthz, token, thumbprint) - if !ok || err != nil { - return fmt.Errorf("server under test returned a certificate with an invalid key authorization (%w)", err) - } - } - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > The ACME server verifies that ... the certificate returned - // > contains: ... a critical acmeIdentifier extension containing - // > the expected SHA-256 digest computed in step 1. - if !foundACMEId { - return fmt.Errorf("server under test returned a certificate without the required acmeIdentifier extension") - } - - // Remove the handled critical extension and validate that we - // have no additional critical extensions left unhandled. - var index int = -1 - for oidIndex, oid := range cert.UnhandledCriticalExtensions { - if oid.Equal(OIDACMEIdentifier) { - index = oidIndex - break - } - } - if index != -1 { - // Unlike the foundACMEId case, this is not a failure; if Go - // updates to "understand" this critical extension, we do not - // wish to fail. - cert.UnhandledCriticalExtensions = append(cert.UnhandledCriticalExtensions[0:index], cert.UnhandledCriticalExtensions[index+1:]...) - } - if len(cert.UnhandledCriticalExtensions) > 0 { - return fmt.Errorf("server under test returned a certificate with additional unknown critical extensions (%v)", cert.UnhandledCriticalExtensions) - } - - // All good! - return nil - }, - - // We never want to resume a connection; do not provide session - // cache storage. - ClientSessionCache: nil, - - // Do not trust any system trusted certificates; we're going to be - // manually validating the chain, so specifying a non-empty pool - // here could only cause additional, unnecessary work. - RootCAs: x509.NewCertPool(), - - // Do not bother validating the client's chain; we know it should be - // self-signed. This also disables hostname verification, but we do - // this verification as part of VerifyConnection(...) ourselves. - // - // Per Go docs, this option is only safe in conjunction with - // VerifyConnection which we define above. - InsecureSkipVerify: true, - - // RFC 8737 Section 4. acme-tls/1 Protocol Definition: - // - // > ACME servers that implement "acme-tls/1" MUST only negotiate - // > TLS 1.2 [RFC5246] or higher when connecting to clients for - // > validation. - MinVersion: tls.VersionTLS12, - - // While RFC 8737 does not place restrictions around allowed cipher - // suites, we wish to restrict ourselves to secure defaults. Specify - // the Intermediate guideline from Mozilla's TLS config generator to - // disable obviously weak ciphers. - // - // See also: https://ssl-config.mozilla.org/#server=go&version=1.14.4&config=intermediate&guideline=5.7 - CipherSuites: []uint16{ - tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, - tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, - tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305, - tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305, - }, - } - - // Build a dialer using our custom DNS resolver, to ensure domains get - // resolved according to configuration. 
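// Standalone sketch of the self-signed certificate an acme-tls/1 responder
// would present to satisfy the checks in VerifyConnection above: exactly one
// dNSName SAN for the identifier under validation plus a critical
// acmeIdentifier extension whose value is the base64url SHA-256 digest of the
// key authorization, mirroring what this package's own tests construct. The
// domain, token and thumbprint values here are made up.
package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/sha256"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/asn1"
	"encoding/base64"
	"fmt"
	"math/big"
)

var oidACMEIdentifier = asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1, 31}

func buildValidationCert(domain, token, thumbprint string) (*x509.Certificate, *ecdsa.PrivateKey, error) {
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		return nil, nil, err
	}

	// Extension value: base64url SHA-256 digest of "token.thumbprint".
	sum := sha256.Sum256([]byte(token + "." + thumbprint))
	authz := base64.RawURLEncoding.EncodeToString(sum[:])

	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: domain},
		DNSNames:     []string{domain}, // exactly one dNSName and no other SAN types
		ExtraExtensions: []pkix.Extension{{
			Id:       oidACMEIdentifier,
			Critical: true, // the extension must be marked critical
			Value:    []byte(authz),
		}},
		KeyUsage:              x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign,
		BasicConstraintsValid: true,
		IsCA:                  true,
	}

	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key)
	if err != nil {
		return nil, nil, err
	}
	cert, err := x509.ParseCertificate(der)
	return cert, key, err
}

func main() {
	cert, _, err := buildValidationCert("example.test", "example-token", "example-thumbprint")
	if err != nil {
		panic(err)
	}
	fmt.Println("acme-tls/1 validation certificate for:", cert.DNSNames[0])
}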
- dialer, err := buildDialerConfig(config) - if err != nil { - return false, fmt.Errorf("failed to build dialer: %w", err) - } - - // Per RFC 8737 Section 3. TLS with Application-Layer Protocol - // Negotiation (TLS ALPN) Challenge: - // - // > 2. The ACME server resolves the domain name being validated and - // > chooses one of the IP addresses returned for validation (the - // > server MAY validate against multiple addresses if more than - // > one is returned). - // > 3. The ACME server initiates a TLS connection to the chosen IP - // > address. This connection MUST use TCP port 443. - address := fmt.Sprintf("%v:"+ALPNPort, domain) - conn, err := dialer.Dial("tcp", address) - if err != nil { - return false, fmt.Errorf("tls-alpn-01: failed to dial host: %w", err) - } - - // Initiate the connection to the remote peer. - client := tls.Client(conn, cfg) - - // We intentionally swallow this error as it isn't useful to the - // underlying protocol we perform here. Notably, per RFC 8737 - // Section 4. acme-tls/1 Protocol Definition: - // - // > Once the handshake is completed, the client MUST NOT exchange - // > any further data with the server and MUST immediately close the - // > connection. ... Because of this, an ACME server MAY choose to - // > withhold authorization if either the certificate signature is - // > invalid or the handshake doesn't fully complete. - defer client.Close() - - // We wish to put time bounds on the total time the handshake can - // stall for, so build a connection context here. - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - // See note above about why we can allow Handshake to complete - // successfully. - if err := client.HandshakeContext(ctx); err != nil { - return false, fmt.Errorf("tls-alpn-01: failed to perform handshake: %w", err) - } - return true, nil -} diff --git a/builtin/logical/pki/acme_challenges_test.go b/builtin/logical/pki/acme_challenges_test.go deleted file mode 100644 index 3bcdf88141d79a..00000000000000 --- a/builtin/logical/pki/acme_challenges_test.go +++ /dev/null @@ -1,703 +0,0 @@ -package pki - -import ( - "context" - "crypto" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "crypto/sha256" - "crypto/tls" - "crypto/x509" - "crypto/x509/pkix" - "encoding/base64" - "fmt" - "math/big" - "net/http" - "net/http/httptest" - "strings" - "testing" - "time" - - "github.com/hashicorp/vault/builtin/logical/pki/dnstest" - - "github.com/stretchr/testify/require" -) - -type keyAuthorizationTestCase struct { - keyAuthz string - token string - thumbprint string - shouldFail bool -} - -var keyAuthorizationTestCases = []keyAuthorizationTestCase{ - { - // Entirely empty - "", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Both empty - ".", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Not equal - "non-.non-", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Empty thumbprint - "non-.", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Empty token - ".non-", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Wrong order - "non-empty-thumbprint.non-empty-token", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Too many pieces - "one.two.three", - "non-empty-token", - "non-empty-thumbprint", - true, - }, - { - // Valid - "non-empty-token.non-empty-thumbprint", - "non-empty-token", - "non-empty-thumbprint", - false, - }, -} - -func TestAcmeValidateKeyAuthorization(t *testing.T) { - 
t.Parallel() - - for index, tc := range keyAuthorizationTestCases { - isValid, err := ValidateKeyAuthorization(tc.keyAuthz, tc.token, tc.thumbprint) - if !isValid && err == nil { - t.Fatalf("[%d] expected failure to give reason via err (%v / %v)", index, isValid, err) - } - - expectedValid := !tc.shouldFail - if expectedValid != isValid { - t.Fatalf("[%d] got ret=%v, expected ret=%v (shouldFail=%v)", index, isValid, expectedValid, tc.shouldFail) - } - } -} - -func TestAcmeValidateHTTP01Challenge(t *testing.T) { - t.Parallel() - - for index, tc := range keyAuthorizationTestCases { - validFunc := func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(tc.keyAuthz)) - } - withPadding := func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(" " + tc.keyAuthz + " ")) - } - withRedirect := func(w http.ResponseWriter, r *http.Request) { - if strings.Contains(r.URL.Path, "/.well-known/") { - http.Redirect(w, r, "/my-http-01-challenge-response", 301) - return - } - - w.Write([]byte(tc.keyAuthz)) - } - withSleep := func(w http.ResponseWriter, r *http.Request) { - // Long enough to ensure any excessively short timeouts are hit, - // not long enough to trigger a failure (hopefully). - time.Sleep(5 * time.Second) - w.Write([]byte(tc.keyAuthz)) - } - - validHandlers := []http.HandlerFunc{ - http.HandlerFunc(validFunc), http.HandlerFunc(withPadding), - http.HandlerFunc(withRedirect), http.HandlerFunc(withSleep), - } - - for handlerIndex, handler := range validHandlers { - func() { - ts := httptest.NewServer(handler) - defer ts.Close() - - host := ts.URL[7:] - isValid, err := ValidateHTTP01Challenge(host, tc.token, tc.thumbprint, &acmeConfigEntry{}) - if !isValid && err == nil { - t.Fatalf("[tc=%d/handler=%d] expected failure to give reason via err (%v / %v)", index, handlerIndex, isValid, err) - } - - expectedValid := !tc.shouldFail - if expectedValid != isValid { - t.Fatalf("[tc=%d/handler=%d] got ret=%v (err=%v), expected ret=%v (shouldFail=%v)", index, handlerIndex, isValid, err, expectedValid, tc.shouldFail) - } - }() - } - } - - // Negative test cases for various HTTP-specific scenarios. 
- redirectLoop := func(w http.ResponseWriter, r *http.Request) { - http.Redirect(w, r, "/my-http-01-challenge-response", 301) - } - publicRedirect := func(w http.ResponseWriter, r *http.Request) { - http.Redirect(w, r, "http://hashicorp.com/", 301) - } - noData := func(w http.ResponseWriter, r *http.Request) {} - noContent := func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNoContent) - } - notFound := func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNotFound) - } - simulateHang := func(w http.ResponseWriter, r *http.Request) { - time.Sleep(30 * time.Second) - w.Write([]byte("my-token.my-thumbprint")) - } - tooLarge := func(w http.ResponseWriter, r *http.Request) { - for i := 0; i < 512; i++ { - w.Write([]byte("my-token.my-thumbprint\n")) - } - } - - validHandlers := []http.HandlerFunc{ - http.HandlerFunc(redirectLoop), http.HandlerFunc(publicRedirect), - http.HandlerFunc(noData), http.HandlerFunc(noContent), - http.HandlerFunc(notFound), http.HandlerFunc(simulateHang), - http.HandlerFunc(tooLarge), - } - for handlerIndex, handler := range validHandlers { - func() { - ts := httptest.NewServer(handler) - defer ts.Close() - - host := ts.URL[7:] - isValid, err := ValidateHTTP01Challenge(host, "my-token", "my-thumbprint", &acmeConfigEntry{}) - if isValid || err == nil { - t.Fatalf("[handler=%d] expected failure validating challenge (%v / %v)", handlerIndex, isValid, err) - } - }() - } -} - -func TestAcmeValidateDNS01Challenge(t *testing.T) { - t.Parallel() - - host := "dadgarcorp.com" - resolver := dnstest.SetupResolver(t, host) - defer resolver.Cleanup() - - t.Logf("DNS Server Address: %v", resolver.GetLocalAddr()) - - config := &acmeConfigEntry{ - DNSResolver: resolver.GetLocalAddr(), - } - - for index, tc := range keyAuthorizationTestCases { - checksum := sha256.Sum256([]byte(tc.keyAuthz)) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - resolver.AddRecord(DNSChallengePrefix+host, "TXT", authz) - resolver.PushConfig() - - isValid, err := ValidateDNS01Challenge(host, tc.token, tc.thumbprint, config) - if !isValid && err == nil { - t.Fatalf("[tc=%d] expected failure to give reason via err (%v / %v)", index, isValid, err) - } - - expectedValid := !tc.shouldFail - if expectedValid != isValid { - t.Fatalf("[tc=%d] got ret=%v (err=%v), expected ret=%v (shouldFail=%v)", index, isValid, err, expectedValid, tc.shouldFail) - } - - resolver.RemoveAllRecords() - } -} - -func TestAcmeValidateTLSALPN01Challenge(t *testing.T) { - // This test is not parallel because we modify ALPNPort to use a custom - // non-standard port _just for testing purposes_. 
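// Standalone sketch of the resolver wiring that buildResolver performs when an
// ACME config sets a dns_resolver: a net.Resolver whose Dial always goes to
// the configured DNS server address instead of the system default. The
// "127.0.0.1:8053" address and record name below are placeholders.
package main

import (
	"context"
	"fmt"
	"net"
	"time"
)

func resolverFor(dnsServer string) *net.Resolver {
	return &net.Resolver{
		PreferGo: true,
		Dial: func(ctx context.Context, network, address string) (net.Conn, error) {
			d := net.Dialer{Timeout: 10 * time.Second}
			// Ignore the address Go picked and dial the configured server instead.
			return d.DialContext(ctx, network, dnsServer)
		},
	}
}

func main() {
	r := resolverFor("127.0.0.1:8053")

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Same lookup shape ValidateDNS01Challenge performs against
	// "_acme-challenge." + domain.
	records, err := r.LookupTXT(ctx, "_acme-challenge.example.test")
	fmt.Println(records, err)
}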
- host := "localhost" - config := &acmeConfigEntry{} - - returnedProtocols := []string{ALPNProtocol} - var certificates []*x509.Certificate - var privateKey crypto.PrivateKey - - tlsCfg := &tls.Config{} - tlsCfg.GetConfigForClient = func(*tls.ClientHelloInfo) (*tls.Config, error) { - var retCfg tls.Config = *tlsCfg - retCfg.NextProtos = returnedProtocols - t.Logf("[alpn-server] returned protocol: %v", returnedProtocols) - return &retCfg, nil - } - tlsCfg.GetCertificate = func(*tls.ClientHelloInfo) (*tls.Certificate, error) { - var ret tls.Certificate - for index, cert := range certificates { - ret.Certificate = append(ret.Certificate, cert.Raw) - if index == 0 { - ret.Leaf = cert - } - } - ret.PrivateKey = privateKey - t.Logf("[alpn-server] returned certificates: %v", ret) - return &ret, nil - } - - ln, err := tls.Listen("tcp", host+":0", tlsCfg) - require.NoError(t, err, "failed to listen with TLS config") - - doOneAccept := func() { - t.Logf("[alpn-server] starting accept...") - connRaw, err := ln.Accept() - require.NoError(t, err, "failed to accept TLS connection") - - t.Logf("[alpn-server] got connection...") - conn := tls.Server(connRaw.(*tls.Conn), tlsCfg) - - ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute) - defer func() { - t.Logf("[alpn-server] defer context cancel executing") - cancel() - }() - - t.Logf("[alpn-server] starting handshake...") - if err := conn.HandshakeContext(ctx); err != nil { - t.Logf("[alpn-server] got non-fatal error while handshaking connection: %v", err) - } - - t.Logf("[alpn-server] closing connection...") - if err := conn.Close(); err != nil { - t.Logf("[alpn-server] got non-fatal error while closing connection: %v", err) - } - } - - ALPNPort = strings.Split(ln.Addr().String(), ":")[1] - - type alpnTestCase struct { - name string - certificates []*x509.Certificate - privateKey crypto.PrivateKey - protocols []string - token string - thumbprint string - shouldFail bool - } - - var alpnTestCases []alpnTestCase - // Add all of our keyAuthorizationTestCases into alpnTestCases - for index, tc := range keyAuthorizationTestCases { - t.Logf("using keyAuthorizationTestCase [tc=%d] as alpnTestCase [tc=%d]...", index, len(alpnTestCases)) - // Properly encode the authorization. - checksum := sha256.Sum256([]byte(tc.keyAuthz)) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - // Build a self-signed certificate. 
- key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: host, - }, - Issuer: pkix.Name{ - CommonName: host, - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(1), - DNSNames: []string{host}, - ExtraExtensions: []pkix.Extension{ - { - Id: OIDACMEIdentifier, - Critical: true, - Value: []byte(authz), - }, - }, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) - require.NoError(t, err, "failed to create certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated certificate") - - newTc := alpnTestCase{ - name: fmt.Sprintf("keyAuthorizationTestCase[%d]", index), - certificates: []*x509.Certificate{cert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: tc.token, - thumbprint: tc.thumbprint, - shouldFail: tc.shouldFail, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: Longer chain - // Build a self-signed certificate. - rootKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating root private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: "Root CA", - }, - Issuer: pkix.Name{ - CommonName: "Root CA", - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: rootKey.Public(), - SerialNumber: big.NewInt(1), - BasicConstraintsValid: true, - IsCA: true, - } - rootCertBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, rootKey.Public(), rootKey) - require.NoError(t, err, "failed to create root certificate") - rootCert, err := x509.ParseCertificate(rootCertBytes) - require.NoError(t, err, "failed to parse newly generated root certificate") - - // Compute our authorization. - checksum := sha256.Sum256([]byte("valid.valid")) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - // Build a leaf certificate which _could_ pass validation - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating leaf private key") - tmpl = &x509.Certificate{ - Subject: pkix.Name{ - CommonName: host, - }, - Issuer: pkix.Name{ - CommonName: "Root CA", - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(2), - DNSNames: []string{host}, - ExtraExtensions: []pkix.Extension{ - { - Id: OIDACMEIdentifier, - Critical: true, - Value: []byte(authz), - }, - }, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, rootCert, key.Public(), rootKey) - require.NoError(t, err, "failed to create leaf certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated leaf certificate") - - newTc := alpnTestCase{ - name: "longer chain with valid leaf", - certificates: []*x509.Certificate{cert, rootCert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: true, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: cert without DNSSan - // Compute our authorization. 
- checksum := sha256.Sum256([]byte("valid.valid")) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - // Build a leaf certificate without a DNSSan - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating leaf private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: host, - }, - Issuer: pkix.Name{ - CommonName: host, - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(2), - // NO DNSNames - ExtraExtensions: []pkix.Extension{ - { - Id: OIDACMEIdentifier, - Critical: true, - Value: []byte(authz), - }, - }, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) - require.NoError(t, err, "failed to create leaf certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated leaf certificate") - - newTc := alpnTestCase{ - name: "valid keyauthz without valid dnsname", - certificates: []*x509.Certificate{cert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: true, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: cert without matching DNSSan - // Compute our authorization. - checksum := sha256.Sum256([]byte("valid.valid")) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - // Build a leaf certificate which fails validation due to bad DNSName - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating leaf private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: host, - }, - Issuer: pkix.Name{ - CommonName: host, - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(2), - DNSNames: []string{host + ".dadgarcorp.com" /* not matching host! */}, - ExtraExtensions: []pkix.Extension{ - { - Id: OIDACMEIdentifier, - Critical: true, - Value: []byte(authz), - }, - }, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) - require.NoError(t, err, "failed to create leaf certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated leaf certificate") - - newTc := alpnTestCase{ - name: "valid keyauthz without matching dnsname", - certificates: []*x509.Certificate{cert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: true, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: cert with additional SAN - // Compute our authorization. 
- checksum := sha256.Sum256([]byte("valid.valid")) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - // Build a leaf certificate which has an invalid additional SAN - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating leaf private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: host, - }, - Issuer: pkix.Name{ - CommonName: host, - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(2), - DNSNames: []string{host}, - EmailAddresses: []string{"webmaster@" + host}, /* unexpected */ - ExtraExtensions: []pkix.Extension{ - { - Id: OIDACMEIdentifier, - Critical: true, - Value: []byte(authz), - }, - }, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) - require.NoError(t, err, "failed to create leaf certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated leaf certificate") - - newTc := alpnTestCase{ - name: "valid keyauthz with additional email SANs", - certificates: []*x509.Certificate{cert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: true, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: cert without CN - // Compute our authorization. - checksum := sha256.Sum256([]byte("valid.valid")) - authz := base64.RawURLEncoding.EncodeToString(checksum[:]) - - // Build a leaf certificate which should pass validation - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating leaf private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{}, - Issuer: pkix.Name{}, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(2), - DNSNames: []string{host}, - ExtraExtensions: []pkix.Extension{ - { - Id: OIDACMEIdentifier, - Critical: true, - Value: []byte(authz), - }, - }, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) - require.NoError(t, err, "failed to create leaf certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated leaf certificate") - - newTc := alpnTestCase{ - name: "valid certificate; no Subject/Issuer (missing CN)", - certificates: []*x509.Certificate{cert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: false, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: cert without the extension - // Build a leaf certificate which should fail validation - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating leaf private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{}, - Issuer: pkix.Name{}, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: key.Public(), - SerialNumber: big.NewInt(1), - DNSNames: []string{host}, - BasicConstraintsValid: true, - IsCA: true, - } - certBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) - require.NoError(t, err, "failed to create leaf 
certificate") - cert, err := x509.ParseCertificate(certBytes) - require.NoError(t, err, "failed to parse newly generated leaf certificate") - - newTc := alpnTestCase{ - name: "missing required acmeIdentifier extension", - certificates: []*x509.Certificate{cert}, - privateKey: key, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: true, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - { - // Test case: root without a leaf - // Build a self-signed certificate. - rootKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generating root private key") - tmpl := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: "Root CA", - }, - Issuer: pkix.Name{ - CommonName: "Root CA", - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, - PublicKey: rootKey.Public(), - SerialNumber: big.NewInt(1), - BasicConstraintsValid: true, - IsCA: true, - } - rootCertBytes, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, rootKey.Public(), rootKey) - require.NoError(t, err, "failed to create root certificate") - rootCert, err := x509.ParseCertificate(rootCertBytes) - require.NoError(t, err, "failed to parse newly generated root certificate") - - newTc := alpnTestCase{ - name: "root without leaf", - certificates: []*x509.Certificate{rootCert}, - privateKey: rootKey, - protocols: []string{ALPNProtocol}, - token: "valid", - thumbprint: "valid", - shouldFail: true, - } - alpnTestCases = append(alpnTestCases, newTc) - } - - for index, tc := range alpnTestCases { - t.Logf("\n\n[tc=%d/name=%s] starting validation", index, tc.name) - certificates = tc.certificates - privateKey = tc.privateKey - returnedProtocols = tc.protocols - - // Attempt to validate the challenge. - go doOneAccept() - isValid, err := ValidateTLSALPN01Challenge(host, tc.token, tc.thumbprint, config) - if !isValid && err == nil { - t.Fatalf("[tc=%d/name=%s] expected failure to give reason via err (%v / %v)", index, tc.name, isValid, err) - } - - expectedValid := !tc.shouldFail - if expectedValid != isValid { - t.Fatalf("[tc=%d/name=%s] got ret=%v (err=%v), expected ret=%v (shouldFail=%v)", index, tc.name, isValid, err, expectedValid, tc.shouldFail) - } else if err != nil { - t.Logf("[tc=%d/name=%s] got expected failure: err=%v", index, tc.name, err) - } - } -} diff --git a/builtin/logical/pki/acme_eab_policy.go b/builtin/logical/pki/acme_eab_policy.go deleted file mode 100644 index 9a96f3af269e50..00000000000000 --- a/builtin/logical/pki/acme_eab_policy.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "fmt" - "strings" -) - -type EabPolicyName string - -const ( - eabPolicyNotRequired EabPolicyName = "not-required" - eabPolicyNewAccountRequired EabPolicyName = "new-account-required" - eabPolicyAlwaysRequired EabPolicyName = "always-required" -) - -func getEabPolicyByString(name string) (EabPolicy, error) { - lcName := strings.TrimSpace(strings.ToLower(name)) - switch lcName { - case string(eabPolicyNotRequired): - return getEabPolicyByName(eabPolicyNotRequired), nil - case string(eabPolicyNewAccountRequired): - return getEabPolicyByName(eabPolicyNewAccountRequired), nil - case string(eabPolicyAlwaysRequired): - return getEabPolicyByName(eabPolicyAlwaysRequired), nil - default: - return getEabPolicyByName(eabPolicyAlwaysRequired), fmt.Errorf("unknown eab policy name: %s", name) - } -} - -func getEabPolicyByName(name EabPolicyName) EabPolicy { - return EabPolicy{Name: name} -} - -type EabPolicy struct { - Name EabPolicyName -} - -// EnforceForNewAccount for new account creations, should we require an EAB. -func (ep EabPolicy) EnforceForNewAccount(eabData *eabType) error { - if (ep.Name == eabPolicyAlwaysRequired || ep.Name == eabPolicyNewAccountRequired) && eabData == nil { - return ErrExternalAccountRequired - } - - return nil -} - -// EnforceForExistingAccount for all operations within ACME, does the account being used require an EAB attached to it. -func (ep EabPolicy) EnforceForExistingAccount(account *acmeAccount) error { - if ep.Name == eabPolicyAlwaysRequired && account.Eab == nil { - return ErrExternalAccountRequired - } - - return nil -} - -// IsExternalAccountRequired for new accounts incoming does is an EAB required -func (ep EabPolicy) IsExternalAccountRequired() bool { - return ep.Name == eabPolicyAlwaysRequired || ep.Name == eabPolicyNewAccountRequired -} - -// OverrideEnvDisablingPublicAcme determines if ACME is enabled but the OS environment variable -// has said to disable public acme support, if we can override that environment variable to -// turn on ACME support -func (ep EabPolicy) OverrideEnvDisablingPublicAcme() bool { - return ep.Name == eabPolicyAlwaysRequired -} diff --git a/builtin/logical/pki/acme_errors.go b/builtin/logical/pki/acme_errors.go deleted file mode 100644 index 5b73d9d215e0b5..00000000000000 --- a/builtin/logical/pki/acme_errors.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "encoding/json" - "errors" - "fmt" - "net/http" - - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/vault/sdk/logical" -) - -// Error prefix; see RFC 8555 Section 6.7. Errors. -const ErrorPrefix = "urn:ietf:params:acme:error:" -const ErrorContentType = "application/problem+json" - -// See RFC 8555 Section 6.7. Errors. 
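// Condensed, standalone restatement of the EAB policy behaviour defined in
// acme_eab_policy.go above, as a quick reference; the boolean helpers here are
// illustrative and not part of the backend.
package main

import "fmt"

type eabPolicy string

const (
	notRequired        eabPolicy = "not-required"
	newAccountRequired eabPolicy = "new-account-required"
	alwaysRequired     eabPolicy = "always-required"
)

// requireEabForNewAccount mirrors EnforceForNewAccount: both the
// "new-account-required" and "always-required" policies demand an EAB when an
// account is first registered.
func requireEabForNewAccount(p eabPolicy) bool {
	return p == newAccountRequired || p == alwaysRequired
}

// requireEabForExistingAccount mirrors EnforceForExistingAccount: only
// "always-required" retroactively rejects accounts created without an EAB.
func requireEabForExistingAccount(p eabPolicy) bool {
	return p == alwaysRequired
}

func main() {
	for _, p := range []eabPolicy{notRequired, newAccountRequired, alwaysRequired} {
		fmt.Printf("%-22s new-account EAB: %-5v existing-account EAB: %v\n",
			p, requireEabForNewAccount(p), requireEabForExistingAccount(p))
	}
}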
-var ErrAccountDoesNotExist = errors.New("The request specified an account that does not exist") - -var ErrAcmeDisabled = errors.New("ACME feature is disabled") - -var ( - ErrAlreadyRevoked = errors.New("The request specified a certificate to be revoked that has already been revoked") - ErrBadCSR = errors.New("The CSR is unacceptable") - ErrBadNonce = errors.New("The client sent an unacceptable anti-replay nonce") - ErrBadPublicKey = errors.New("The JWS was signed by a public key the server does not support") - ErrBadRevocationReason = errors.New("The revocation reason provided is not allowed by the server") - ErrBadSignatureAlgorithm = errors.New("The JWS was signed with an algorithm the server does not support") - ErrCAA = errors.New("Certification Authority Authorization (CAA) records forbid the CA from issuing a certificate") - ErrCompound = errors.New("Specific error conditions are indicated in the 'subproblems' array") - ErrConnection = errors.New("The server could not connect to validation target") - ErrDNS = errors.New("There was a problem with a DNS query during identifier validation") - ErrExternalAccountRequired = errors.New("The request must include a value for the 'externalAccountBinding' field") - ErrIncorrectResponse = errors.New("Response received didn't match the challenge's requirements") - ErrInvalidContact = errors.New("A contact URL for an account was invalid") - ErrMalformed = errors.New("The request message was malformed") - ErrOrderNotReady = errors.New("The request attempted to finalize an order that is not ready to be finalized") - ErrRateLimited = errors.New("The request exceeds a rate limit") - ErrRejectedIdentifier = errors.New("The server will not issue certificates for the identifier") - ErrServerInternal = errors.New("The server experienced an internal error") - ErrTLS = errors.New("The server received a TLS error during validation") - ErrUnauthorized = errors.New("The client lacks sufficient authorization") - ErrUnsupportedContact = errors.New("A contact URL for an account used an unsupported protocol scheme") - ErrUnsupportedIdentifier = errors.New("An identifier is of an unsupported type") - ErrUserActionRequired = errors.New("Visit the 'instance' URL and take actions specified there") -) - -// Mapping of err->name; see table in RFC 8555 Section 6.7. Errors. -var errIdMappings = map[error]string{ - ErrAccountDoesNotExist: "accountDoesNotExist", - ErrAlreadyRevoked: "alreadyRevoked", - ErrBadCSR: "badCSR", - ErrBadNonce: "badNonce", - ErrBadPublicKey: "badPublicKey", - ErrBadRevocationReason: "badRevocationReason", - ErrBadSignatureAlgorithm: "badSignatureAlgorithm", - ErrCAA: "caa", - ErrCompound: "compound", - ErrConnection: "connection", - ErrDNS: "dns", - ErrExternalAccountRequired: "externalAccountRequired", - ErrIncorrectResponse: "incorrectResponse", - ErrInvalidContact: "invalidContact", - ErrMalformed: "malformed", - ErrOrderNotReady: "orderNotReady", - ErrRateLimited: "rateLimited", - ErrRejectedIdentifier: "rejectedIdentifier", - ErrServerInternal: "serverInternal", - ErrTLS: "tls", - ErrUnauthorized: "unauthorized", - ErrUnsupportedContact: "unsupportedContact", - ErrUnsupportedIdentifier: "unsupportedIdentifier", - ErrUserActionRequired: "userActionRequired", -} - -// Mapping of err->status codes; see table in RFC 8555 Section 6.7. Errors. -var errCodeMappings = map[error]int{ - ErrAccountDoesNotExist: http.StatusBadRequest, // See RFC 8555 Section 7.3.1. Finding an Account URL Given a Key. 
- ErrAlreadyRevoked: http.StatusBadRequest, - ErrBadCSR: http.StatusBadRequest, - ErrBadNonce: http.StatusBadRequest, - ErrBadPublicKey: http.StatusBadRequest, - ErrBadRevocationReason: http.StatusBadRequest, - ErrBadSignatureAlgorithm: http.StatusBadRequest, - ErrCAA: http.StatusForbidden, - ErrCompound: http.StatusBadRequest, - ErrConnection: http.StatusInternalServerError, - ErrDNS: http.StatusInternalServerError, - ErrExternalAccountRequired: http.StatusUnauthorized, - ErrIncorrectResponse: http.StatusBadRequest, - ErrInvalidContact: http.StatusBadRequest, - ErrMalformed: http.StatusBadRequest, - ErrOrderNotReady: http.StatusForbidden, // See RFC 8555 Section 7.4. Applying for Certificate Issuance. - ErrRateLimited: http.StatusTooManyRequests, - ErrRejectedIdentifier: http.StatusBadRequest, - ErrServerInternal: http.StatusInternalServerError, - ErrTLS: http.StatusInternalServerError, - ErrUnauthorized: http.StatusUnauthorized, - ErrUnsupportedContact: http.StatusBadRequest, - ErrUnsupportedIdentifier: http.StatusBadRequest, - ErrUserActionRequired: http.StatusUnauthorized, -} - -type ErrorResponse struct { - StatusCode int `json:"-"` - Type string `json:"type"` - Detail string `json:"detail"` - Subproblems []*ErrorResponse `json:"subproblems"` -} - -func (e *ErrorResponse) MarshalForStorage() map[string]interface{} { - subProblems := []map[string]interface{}{} - for _, subProblem := range e.Subproblems { - subProblems = append(subProblems, subProblem.MarshalForStorage()) - } - return map[string]interface{}{ - "status": e.StatusCode, - "type": e.Type, - "detail": e.Detail, - "subproblems": subProblems, - } -} - -func (e *ErrorResponse) Marshal() (*logical.Response, error) { - body, err := json.Marshal(e) - if err != nil { - return nil, fmt.Errorf("failed marshalling of error response: %w", err) - } - - var resp logical.Response - resp.Data = map[string]interface{}{ - logical.HTTPContentType: ErrorContentType, - logical.HTTPRawBody: body, - logical.HTTPStatusCode: e.StatusCode, - } - - return &resp, nil -} - -func FindType(given error) (err error, id string, code int, found bool) { - matchedError := false - for err, id = range errIdMappings { - if errors.Is(given, err) { - matchedError = true - break - } - } - - // If the given error was not matched from one of the standard ACME errors - // make this error, force ErrServerInternal - if !matchedError { - err = ErrServerInternal - id = errIdMappings[err] - } - - code = errCodeMappings[err] - - return -} - -func TranslateError(given error) (*logical.Response, error) { - if errors.Is(given, logical.ErrReadOnly) { - return nil, given - } - - if errors.Is(given, ErrAcmeDisabled) { - return logical.RespondWithStatusCode(nil, nil, http.StatusNotFound) - } - - body := TranslateErrorToErrorResponse(given) - - return body.Marshal() -} - -func TranslateErrorToErrorResponse(given error) ErrorResponse { - // We're multierror aware here: if we're given a list of errors, assume - // they're structured so the first error is the outer error and the inner - // subproblems are subsequent in the multierror. - var remaining []error - if unwrapped, ok := given.(*multierror.Error); ok { - remaining = unwrapped.Errors[1:] - given = unwrapped.Errors[0] - } - - _, id, code, found := FindType(given) - if !found && len(remaining) > 0 { - // Translate multierrors into a generic error code. 
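// Standalone sketch of the wire shape ErrorResponse.Marshal produces: an RFC
// 7807-style problem document with the ACME urn prefix, served with the
// application/problem+json content type. The mirrored struct and the sample
// detail text below are illustrative only.
package main

import (
	"encoding/json"
	"fmt"
)

type problem struct {
	Type        string     `json:"type"`
	Detail      string     `json:"detail"`
	Subproblems []*problem `json:"subproblems"`
}

func main() {
	body := problem{
		Type:   "urn:ietf:params:acme:error:malformed",
		Detail: "The request message was malformed",
	}

	out, err := json.MarshalIndent(body, "", "  ")
	if err != nil {
		panic(err)
	}
	// A client receiving this alongside HTTP 400 can dispatch on the "type" URN.
	fmt.Println(string(out))
}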
- id = errIdMappings[ErrCompound] - code = errCodeMappings[ErrCompound] - } - - var body ErrorResponse - body.Type = ErrorPrefix + id - body.Detail = given.Error() - body.StatusCode = code - - for _, subgiven := range remaining { - _, subid, _, _ := FindType(subgiven) - - var sub ErrorResponse - sub.Type = ErrorPrefix + subid - body.Detail = subgiven.Error() - - body.Subproblems = append(body.Subproblems, &sub) - } - return body -} diff --git a/builtin/logical/pki/acme_jws.go b/builtin/logical/pki/acme_jws.go deleted file mode 100644 index 3f6ba6d27187c5..00000000000000 --- a/builtin/logical/pki/acme_jws.go +++ /dev/null @@ -1,278 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "bytes" - "crypto" - "encoding/base64" - "encoding/json" - "fmt" - "strings" - - "github.com/go-jose/go-jose/v3" -) - -var AllowedOuterJWSTypes = map[string]interface{}{ - "RS256": true, - "RS384": true, - "RS512": true, - "PS256": true, - "PS384": true, - "PS512": true, - "ES256": true, - "ES384": true, - "ES512": true, - "EdDSA2": true, -} - -var AllowedEabJWSTypes = map[string]interface{}{ - "HS256": true, - "HS384": true, - "HS512": true, -} - -// This wraps a JWS message structure. -type jwsCtx struct { - Algo string `json:"alg"` - Kid string `json:"kid"` - Jwk json.RawMessage `json:"jwk"` - Nonce string `json:"nonce"` - Url string `json:"url"` - Key jose.JSONWebKey `json:"-"` - Existing bool `json:"-"` -} - -func (c *jwsCtx) GetKeyThumbprint() (string, error) { - keyThumbprint, err := c.Key.Thumbprint(crypto.SHA256) - if err != nil { - return "", fmt.Errorf("failed creating thumbprint: %w", err) - } - return base64.RawURLEncoding.EncodeToString(keyThumbprint), nil -} - -func UnmarshalEabJwsJson(eabBytes []byte) (*jwsCtx, error) { - var eabJws jwsCtx - var err error - if err = json.Unmarshal(eabBytes, &eabJws); err != nil { - return nil, err - } - - if eabJws.Kid == "" { - return nil, fmt.Errorf("invalid header: got missing required field 'kid': %w", ErrMalformed) - } - - if _, present := AllowedEabJWSTypes[eabJws.Algo]; !present { - return nil, fmt.Errorf("invalid header: unexpected value for 'algo': %w", ErrMalformed) - } - - return &eabJws, nil -} - -func (c *jwsCtx) UnmarshalOuterJwsJson(a *acmeState, ac *acmeContext, jws []byte) error { - var err error - if err = json.Unmarshal(jws, c); err != nil { - return err - } - - if c.Kid != "" && len(c.Jwk) > 0 { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The "jwk" and "kid" fields are mutually exclusive. Servers MUST - // > reject requests that contain both. - return fmt.Errorf("invalid header: got both account 'kid' and 'jwk' in the same message; expected only one: %w", ErrMalformed) - } - - if c.Kid == "" && len(c.Jwk) == 0 { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > Either "jwk" (JSON Web Key) or "kid" (Key ID) as specified - // > below - return fmt.Errorf("invalid header: got neither required fields of 'kid' nor 'jwk': %w", ErrMalformed) - } - - if _, present := AllowedOuterJWSTypes[c.Algo]; !present { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The JWS Protected Header MUST include the following fields: - // > - // > - "alg" (Algorithm) - // > - // > * This field MUST NOT contain "none" or a Message - // > Authentication Code (MAC) algorithm (e.g. one in which the - // > algorithm registry description mentions MAC/HMAC). 
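// Standalone sketch of the account thumbprint computation that
// GetKeyThumbprint performs with go-jose: the RFC 7638 JWK thumbprint of the
// account key, base64url-encoded. This is the "thumbprint" half of the
// key-authorization string used by the challenge validators. The inline key
// generation is purely for illustration.
package main

import (
	"crypto"
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	_ "crypto/sha256" // registers SHA-256 for crypto.SHA256.New
	"encoding/base64"
	"fmt"

	"github.com/go-jose/go-jose/v3"
)

func main() {
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}

	jwk := jose.JSONWebKey{Key: key.Public()}
	digest, err := jwk.Thumbprint(crypto.SHA256)
	if err != nil {
		panic(err)
	}

	fmt.Println("account thumbprint:", base64.RawURLEncoding.EncodeToString(digest))
}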
- return fmt.Errorf("invalid header: unexpected value for 'algo': %w", ErrMalformed) - } - - if c.Kid != "" { - // Load KID from storage first. - kid := getKeyIdFromAccountUrl(c.Kid) - c.Jwk, err = a.LoadJWK(ac, kid) - if err != nil { - return err - } - c.Kid = kid // Use the uuid itself, not the full account url that was originally provided to us. - c.Existing = true - } - - if err = c.Key.UnmarshalJSON(c.Jwk); err != nil { - return err - } - - if !c.Key.Valid() { - return fmt.Errorf("received invalid jwk: %w", ErrMalformed) - } - - if c.Kid == "" { - c.Kid = genUuid() - c.Existing = false - } - - return nil -} - -func getKeyIdFromAccountUrl(accountUrl string) string { - pieces := strings.Split(accountUrl, "/") - return pieces[len(pieces)-1] -} - -func hasValues(h jose.Header) bool { - return h.KeyID != "" || h.JSONWebKey != nil || h.Algorithm != "" || h.Nonce != "" || len(h.ExtraHeaders) > 0 -} - -func (c *jwsCtx) VerifyJWS(signature string) (map[string]interface{}, error) { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The JWS Unencoded Payload Option [RFC7797] MUST NOT be used - // - // This is validated by go-jose. - sig, err := jose.ParseSigned(signature) - if err != nil { - return nil, fmt.Errorf("error parsing signature: %s: %w", err, ErrMalformed) - } - - if len(sig.Signatures) > 1 { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The JWS MUST NOT have multiple signatures - return nil, fmt.Errorf("request had multiple signatures: %w", ErrMalformed) - } - - if hasValues(sig.Signatures[0].Unprotected) { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The JWS Unprotected Header [RFC7515] MUST NOT be used - return nil, fmt.Errorf("request had unprotected headers: %w", ErrMalformed) - } - - payload, err := sig.Verify(c.Key) - if err != nil { - return nil, err - } - - if len(payload) == 0 { - // Distinguish POST-AS-GET from POST-with-an-empty-body. - return nil, nil - } - - var m map[string]interface{} - if err := json.Unmarshal(payload, &m); err != nil { - return nil, fmt.Errorf("failed to json unmarshal 'payload': %s: %w", err, ErrMalformed) - } - - return m, nil -} - -func verifyEabPayload(acmeState *acmeState, ac *acmeContext, outer *jwsCtx, expectedPath string, payload map[string]interface{}) (*eabType, error) { - // Parse the key out. 
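// Standalone sketch of how the three externalAccountBinding fields that
// verifyEabPayload reassembles ("protected", "payload", "signature") relate to
// one another for an HS256 EAB: the signature is an HMAC-SHA256 over
// base64url(protected) + "." + base64url(payload) using the EAB MAC key
// (RFC 7515). The header contents, MAC key bytes and JWK payload below are
// made up; in a real EAB the payload is the account's actual JWK.
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

func b64(b []byte) string { return base64.RawURLEncoding.EncodeToString(b) }

func main() {
	macKey := []byte("illustrative-eab-mac-key")

	// "protected" carries the EAB key id, algorithm and target URL; "payload"
	// is the account JWK that the server later compares to the outer JWS key.
	protected := b64([]byte(`{"alg":"HS256","kid":"example-eab-kid","url":"https://vault.example/new-account"}`))
	payload := b64([]byte(`{"kty":"EC","crv":"P-256","x":"...","y":"..."}`))

	mac := hmac.New(sha256.New, macKey)
	mac.Write([]byte(protected + "." + payload))
	signature := b64(mac.Sum(nil))

	// This is the compact form the handler rebuilds before calling jose.ParseSigned.
	fmt.Println(protected + "." + payload + "." + signature)
}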
- rawProtectedBase64, ok := payload["protected"] - if !ok { - return nil, fmt.Errorf("missing required field 'protected': %w", ErrMalformed) - } - jwkBase64 := rawProtectedBase64.(string) - - jwkBytes, err := base64.RawURLEncoding.DecodeString(jwkBase64) - if err != nil { - return nil, fmt.Errorf("failed to base64 parse eab 'protected': %s: %w", err, ErrMalformed) - } - - eabJws, err := UnmarshalEabJwsJson(jwkBytes) - if err != nil { - return nil, fmt.Errorf("failed to json unmarshal eab 'protected': %w", err) - } - - if len(eabJws.Url) == 0 { - return nil, fmt.Errorf("missing required parameter 'url' in eab 'protected': %w", ErrMalformed) - } - expectedUrl := ac.clusterUrl.JoinPath(expectedPath).String() - if expectedUrl != eabJws.Url { - return nil, fmt.Errorf("invalid value for 'url' in eab 'protected': got '%v' expected '%v': %w", eabJws.Url, expectedUrl, ErrUnauthorized) - } - - if len(eabJws.Nonce) != 0 { - return nil, fmt.Errorf("nonce should not be provided in eab 'protected': %w", ErrMalformed) - } - - rawPayloadBase64, ok := payload["payload"] - if !ok { - return nil, fmt.Errorf("missing required field eab 'payload': %w", ErrMalformed) - } - payloadBase64, ok := rawPayloadBase64.(string) - if !ok { - return nil, fmt.Errorf("failed to parse 'payload' field: %w", ErrMalformed) - } - - rawSignatureBase64, ok := payload["signature"] - if !ok { - return nil, fmt.Errorf("missing required field 'signature': %w", ErrMalformed) - } - signatureBase64, ok := rawSignatureBase64.(string) - if !ok { - return nil, fmt.Errorf("failed to parse 'signature' field: %w", ErrMalformed) - } - - // go-jose only seems to support compact signature encodings. - compactSig := fmt.Sprintf("%v.%v.%v", jwkBase64, payloadBase64, signatureBase64) - sig, err := jose.ParseSigned(compactSig) - if err != nil { - return nil, fmt.Errorf("error parsing eab signature: %s: %w", err, ErrMalformed) - } - - if len(sig.Signatures) > 1 { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The JWS MUST NOT have multiple signatures - return nil, fmt.Errorf("eab had multiple signatures: %w", ErrMalformed) - } - - if hasValues(sig.Signatures[0].Unprotected) { - // See RFC 8555 Section 6.2. Request Authentication: - // - // > The JWS Unprotected Header [RFC7515] MUST NOT be used - return nil, fmt.Errorf("eab had unprotected headers: %w", ErrMalformed) - } - - // Load the EAB to validate the signature against - eabEntry, err := acmeState.LoadEab(ac.sc, eabJws.Kid) - if err != nil { - return nil, fmt.Errorf("%w: failed to verify eab", ErrUnauthorized) - } - - verifiedPayload, err := sig.Verify(eabEntry.PrivateBytes) - if err != nil { - return nil, err - } - - // Make sure how eab payload matches the outer JWK key value - if !bytes.Equal(outer.Jwk, verifiedPayload) { - return nil, fmt.Errorf("eab payload does not match outer JWK key: %w", ErrMalformed) - } - - if eabEntry.AcmeDirectory != ac.acmeDirectory { - // This EAB was not created for this specific ACME directory, reject it - return nil, fmt.Errorf("%w: failed to verify eab", ErrUnauthorized) - } - - return eabEntry, nil -} diff --git a/builtin/logical/pki/acme_state.go b/builtin/logical/pki/acme_state.go deleted file mode 100644 index c026de3d5cb5f8..00000000000000 --- a/builtin/logical/pki/acme_state.go +++ /dev/null @@ -1,659 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "crypto/rand" - "encoding/base64" - "errors" - "fmt" - "io" - "net" - "path" - "strings" - "sync" - "sync/atomic" - "time" - - "github.com/hashicorp/go-secure-stdlib/nonceutil" - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -const ( - // How many bytes are in a token. Per RFC 8555 Section - // 8.3. HTTP Challenge and Section 11.3 Token Entropy: - // - // > token (required, string): A random value that uniquely identifies - // > the challenge. This value MUST have at least 128 bits of entropy. - tokenBytes = 128 / 8 - - // Path Prefixes - acmePathPrefix = "acme/" - acmeAccountPrefix = acmePathPrefix + "accounts/" - acmeThumbprintPrefix = acmePathPrefix + "account-thumbprints/" - acmeValidationPrefix = acmePathPrefix + "validations/" - acmeEabPrefix = acmePathPrefix + "eab/" -) - -type acmeState struct { - nonces nonceutil.NonceService - - validator *ACMEChallengeEngine - - configDirty *atomic.Bool - _config sync.RWMutex - config acmeConfigEntry -} - -type acmeThumbprint struct { - Kid string `json:"kid"` - Thumbprint string `json:"-"` -} - -func NewACMEState() *acmeState { - state := &acmeState{ - nonces: nonceutil.NewNonceService(), - validator: NewACMEChallengeEngine(), - configDirty: new(atomic.Bool), - } - // Config hasn't been loaded yet; mark dirty. - state.configDirty.Store(true) - - return state -} - -func (a *acmeState) Initialize(b *backend, sc *storageContext) error { - // Initialize the nonce service. - if err := a.nonces.Initialize(); err != nil { - return fmt.Errorf("failed to initialize the ACME nonce service: %w", err) - } - - // Load the ACME config. - _, err := a.getConfigWithUpdate(sc) - if err != nil { - return fmt.Errorf("error initializing ACME engine: %w", err) - } - - // Kick off our ACME challenge validation engine. - go a.validator.Run(b, a, sc) - - // All good. - return nil -} - -func (a *acmeState) markConfigDirty() { - a.configDirty.Store(true) -} - -func (a *acmeState) reloadConfigIfRequired(sc *storageContext) error { - if !a.configDirty.Load() { - return nil - } - - a._config.Lock() - defer a._config.Unlock() - - if !a.configDirty.Load() { - // Someone beat us to grabbing the above write lock and already - // updated the config. 
- return nil - } - - config, err := sc.getAcmeConfig() - if err != nil { - return fmt.Errorf("failed reading config: %w", err) - } - - a.config = *config - a.configDirty.Store(false) - - return nil -} - -func (a *acmeState) getConfigWithUpdate(sc *storageContext) (*acmeConfigEntry, error) { - if err := a.reloadConfigIfRequired(sc); err != nil { - return nil, err - } - - a._config.RLock() - defer a._config.RUnlock() - - configCopy := a.config - return &configCopy, nil -} - -func generateRandomBase64(srcBytes int) (string, error) { - data := make([]byte, 21) - if _, err := io.ReadFull(rand.Reader, data); err != nil { - return "", err - } - - return base64.RawURLEncoding.EncodeToString(data), nil -} - -func (a *acmeState) GetNonce() (string, time.Time, error) { - return a.nonces.Get() -} - -func (a *acmeState) RedeemNonce(nonce string) bool { - return a.nonces.Redeem(nonce) -} - -func (a *acmeState) DoTidyNonces() { - a.nonces.Tidy() -} - -type ACMEAccountStatus string - -func (aas ACMEAccountStatus) String() string { - return string(aas) -} - -const ( - AccountStatusValid ACMEAccountStatus = "valid" - AccountStatusDeactivated ACMEAccountStatus = "deactivated" - AccountStatusRevoked ACMEAccountStatus = "revoked" -) - -type acmeAccount struct { - KeyId string `json:"-"` - Status ACMEAccountStatus `json:"status"` - Contact []string `json:"contact"` - TermsOfServiceAgreed bool `json:"terms-of-service-agreed"` - Jwk []byte `json:"jwk"` - AcmeDirectory string `json:"acme-directory"` - AccountCreatedDate time.Time `json:"account-created-date"` - MaxCertExpiry time.Time `json:"account-max-cert-expiry"` - AccountRevokedDate time.Time `json:"account-revoked-date"` - Eab *eabType `json:"eab"` -} - -type acmeOrder struct { - OrderId string `json:"-"` - AccountId string `json:"account-id"` - Status ACMEOrderStatusType `json:"status"` - Expires time.Time `json:"expires"` - Identifiers []*ACMEIdentifier `json:"identifiers"` - AuthorizationIds []string `json:"authorization-ids"` - CertificateSerialNumber string `json:"cert-serial-number"` - CertificateExpiry time.Time `json:"cert-expiry"` - // The actual issuer UUID that issued the certificate, blank if an order exists but no certificate was issued. - IssuerId issuerID `json:"issuer-id"` -} - -func (o acmeOrder) getIdentifierDNSValues() []string { - var identifiers []string - for _, value := range o.Identifiers { - if value.Type == ACMEDNSIdentifier { - // Here, because of wildcard processing, we need to use the - // original value provided by the caller rather than the - // post-modification (trimmed '*.' prefix) value. - identifiers = append(identifiers, value.OriginalValue) - } - } - return identifiers -} - -func (o acmeOrder) getIdentifierIPValues() []net.IP { - var identifiers []net.IP - for _, value := range o.Identifiers { - if value.Type == ACMEIPIdentifier { - identifiers = append(identifiers, net.ParseIP(value.Value)) - } - } - return identifiers -} - -func (a *acmeState) CreateAccount(ac *acmeContext, c *jwsCtx, contact []string, termsOfServiceAgreed bool, eab *eabType) (*acmeAccount, error) { - // Write out the thumbprint value/entry out first, if we get an error mid-way through - // this is easier to recover from. The new kid with the same existing public key - // will rewrite the thumbprint entry. This goes in hand with LoadAccountByKey that - // will return a nil, nil value if the referenced kid in a loaded thumbprint does not - // exist. 
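One detail worth noting in generateRandomBase64 above: the body always reads a fixed 21 bytes (168 bits) and ignores its srcBytes argument. That still clears the 128-bit minimum quoted from RFC 8555 for challenge tokens, but the parameter has no effect as written. A sketch that honors the argument, assuming that was the intent:

```go
// Illustrative variant that lets srcBytes control how many random bytes back
// the token; the deleted code above always reads a fixed 21 bytes.
package acmesketch

import (
	"crypto/rand"
	"encoding/base64"
	"io"
)

func generateRandomBase64(srcBytes int) (string, error) {
	data := make([]byte, srcBytes)
	if _, err := io.ReadFull(rand.Reader, data); err != nil {
		return "", err
	}
	return base64.RawURLEncoding.EncodeToString(data), nil
}
```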
This effectively makes this self-healing IF the end-user re-attempts the - // account creation with the same public key. - thumbprint, err := c.GetKeyThumbprint() - if err != nil { - return nil, fmt.Errorf("failed generating thumbprint: %w", err) - } - - thumbPrint := &acmeThumbprint{ - Kid: c.Kid, - Thumbprint: thumbprint, - } - thumbPrintEntry, err := logical.StorageEntryJSON(acmeThumbprintPrefix+thumbprint, thumbPrint) - if err != nil { - return nil, fmt.Errorf("error generating account thumbprint entry: %w", err) - } - - if err = ac.sc.Storage.Put(ac.sc.Context, thumbPrintEntry); err != nil { - return nil, fmt.Errorf("error writing account thumbprint entry: %w", err) - } - - // Now write out the main value that the thumbprint points too. - acct := &acmeAccount{ - KeyId: c.Kid, - Contact: contact, - TermsOfServiceAgreed: termsOfServiceAgreed, - Jwk: c.Jwk, - Status: AccountStatusValid, - AcmeDirectory: ac.acmeDirectory, - AccountCreatedDate: time.Now(), - Eab: eab, - } - json, err := logical.StorageEntryJSON(acmeAccountPrefix+c.Kid, acct) - if err != nil { - return nil, fmt.Errorf("error creating account entry: %w", err) - } - - if err := ac.sc.Storage.Put(ac.sc.Context, json); err != nil { - return nil, fmt.Errorf("error writing account entry: %w", err) - } - - return acct, nil -} - -func (a *acmeState) UpdateAccount(ac *acmeContext, acct *acmeAccount) error { - json, err := logical.StorageEntryJSON(acmeAccountPrefix+acct.KeyId, acct) - if err != nil { - return fmt.Errorf("error creating account entry: %w", err) - } - - if err := ac.sc.Storage.Put(ac.sc.Context, json); err != nil { - return fmt.Errorf("error writing account entry: %w", err) - } - - return nil -} - -// LoadAccount will load the account object based on the passed in keyId field value -// otherwise will return an error if the account does not exist. -func (a *acmeState) LoadAccount(ac *acmeContext, keyId string) (*acmeAccount, error) { - entry, err := ac.sc.Storage.Get(ac.sc.Context, acmeAccountPrefix+keyId) - if err != nil { - return nil, fmt.Errorf("error loading account: %w", err) - } - if entry == nil { - return nil, fmt.Errorf("account not found: %w", ErrAccountDoesNotExist) - } - - var acct acmeAccount - err = entry.DecodeJSON(&acct) - if err != nil { - return nil, fmt.Errorf("error decoding account: %w", err) - } - - if acct.AcmeDirectory != ac.acmeDirectory { - return nil, fmt.Errorf("%w: account part of different ACME directory path", ErrMalformed) - } - - acct.KeyId = keyId - - return &acct, nil -} - -// LoadAccountByKey will attempt to load the account based on a key thumbprint. If the thumbprint -// or kid is unknown a nil, nil will be returned. 
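CreateAccount indexes accounts by a key thumbprint so that LoadAccountByKey (below) can find an existing account from its public key alone. GetKeyThumbprint itself is defined outside this hunk; the sketch below shows the conventional RFC 7638 SHA-256 JWK thumbprint, base64url-encoded without padding, which is what ACME normally uses to identify an account key. It is an assumption about that helper, not a copy of it.

```go
// Sketch of deriving an account-key index, assuming the standard RFC 7638
// JWK thumbprint; the real GetKeyThumbprint helper may differ.
package acmesketch

import (
	"crypto"
	_ "crypto/sha256" // register SHA-256 so crypto.SHA256.New() is available
	"encoding/base64"

	jose "github.com/go-jose/go-jose/v3"
)

func keyThumbprint(jwk *jose.JSONWebKey) (string, error) {
	sum, err := jwk.Thumbprint(crypto.SHA256)
	if err != nil {
		return "", err
	}
	return base64.RawURLEncoding.EncodeToString(sum), nil
}
```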
-func (a *acmeState) LoadAccountByKey(ac *acmeContext, keyThumbprint string) (*acmeAccount, error) { - thumbprintEntry, err := ac.sc.Storage.Get(ac.sc.Context, acmeThumbprintPrefix+keyThumbprint) - if err != nil { - return nil, fmt.Errorf("failed loading acme thumbprintEntry for key: %w", err) - } - if thumbprintEntry == nil { - return nil, nil - } - - var thumbprint acmeThumbprint - err = thumbprintEntry.DecodeJSON(&thumbprint) - if err != nil { - return nil, fmt.Errorf("failed decoding thumbprint entry: %s: %w", keyThumbprint, err) - } - - if len(thumbprint.Kid) == 0 { - return nil, fmt.Errorf("empty kid within thumbprint entry: %s", keyThumbprint) - } - - acct, err := a.LoadAccount(ac, thumbprint.Kid) - if err != nil { - // If we fail to lookup the account that the thumbprint entry references, assume a bad - // write previously occurred in which we managed to write out the thumbprint but failed - // writing out the main account information. - if errors.Is(err, ErrAccountDoesNotExist) { - return nil, nil - } - return nil, err - } - - return acct, nil -} - -func (a *acmeState) LoadJWK(ac *acmeContext, keyId string) ([]byte, error) { - key, err := a.LoadAccount(ac, keyId) - if err != nil { - return nil, err - } - - if len(key.Jwk) == 0 { - return nil, fmt.Errorf("malformed key entry lacks JWK") - } - - return key.Jwk, nil -} - -func (a *acmeState) LoadAuthorization(ac *acmeContext, userCtx *jwsCtx, authId string) (*ACMEAuthorization, error) { - if authId == "" { - return nil, fmt.Errorf("malformed authorization identifier") - } - - authorizationPath := getAuthorizationPath(userCtx.Kid, authId) - - authz, err := loadAuthorizationAtPath(ac.sc, authorizationPath) - if err != nil { - return nil, err - } - - if userCtx.Kid != authz.AccountId { - return nil, ErrUnauthorized - } - - return authz, nil -} - -func loadAuthorizationAtPath(sc *storageContext, authorizationPath string) (*ACMEAuthorization, error) { - entry, err := sc.Storage.Get(sc.Context, authorizationPath) - if err != nil { - return nil, fmt.Errorf("error loading authorization: %w", err) - } - - if entry == nil { - return nil, fmt.Errorf("authorization does not exist: %w", ErrMalformed) - } - - var authz ACMEAuthorization - err = entry.DecodeJSON(&authz) - if err != nil { - return nil, fmt.Errorf("error decoding authorization: %w", err) - } - - return &authz, nil -} - -func (a *acmeState) SaveAuthorization(ac *acmeContext, authz *ACMEAuthorization) error { - path := getAuthorizationPath(authz.AccountId, authz.Id) - return saveAuthorizationAtPath(ac.sc, path, authz) -} - -func saveAuthorizationAtPath(sc *storageContext, path string, authz *ACMEAuthorization) error { - if authz.Id == "" { - return fmt.Errorf("invalid authorization, missing id") - } - - if authz.AccountId == "" { - return fmt.Errorf("invalid authorization, missing account id") - } - - json, err := logical.StorageEntryJSON(path, authz) - if err != nil { - return fmt.Errorf("error creating authorization entry: %w", err) - } - - if err = sc.Storage.Put(sc.Context, json); err != nil { - return fmt.Errorf("error writing authorization entry: %w", err) - } - - return nil -} - -func (a *acmeState) ParseRequestParams(ac *acmeContext, req *logical.Request, data *framework.FieldData) (*jwsCtx, map[string]interface{}, error) { - var c jwsCtx - var m map[string]interface{} - - // Parse the key out. 
- rawJWKBase64, ok := data.GetOk("protected") - if !ok { - return nil, nil, fmt.Errorf("missing required field 'protected': %w", ErrMalformed) - } - jwkBase64 := rawJWKBase64.(string) - - jwkBytes, err := base64.RawURLEncoding.DecodeString(jwkBase64) - if err != nil { - return nil, nil, fmt.Errorf("failed to base64 parse 'protected': %s: %w", err, ErrMalformed) - } - if err = c.UnmarshalOuterJwsJson(a, ac, jwkBytes); err != nil { - return nil, nil, fmt.Errorf("failed to json unmarshal 'protected': %w", err) - } - - // Since we already parsed the header to verify the JWS context, we - // should read and redeem the nonce here too, to avoid doing any extra - // work if it is invalid. - if !a.RedeemNonce(c.Nonce) { - return nil, nil, fmt.Errorf("invalid or reused nonce: %w", ErrBadNonce) - } - - // If the path is incorrect, reject the request. - // - // See RFC 8555 Section 6.4. Request URL Integrity: - // - // > As noted in Section 6.2, all ACME request objects carry a "url" - // > header parameter in their protected header. ... On receiving such - // > an object in an HTTP request, the server MUST compare the "url" - // > header parameter to the request URL. If the two do not match, - // > then the server MUST reject the request as unauthorized. - if len(c.Url) == 0 { - return nil, nil, fmt.Errorf("missing required parameter 'url' in 'protected': %w", ErrMalformed) - } - if ac.clusterUrl.JoinPath(req.Path).String() != c.Url { - return nil, nil, fmt.Errorf("invalid value for 'url' in 'protected': got '%v' expected '%v': %w", c.Url, ac.clusterUrl.JoinPath(req.Path).String(), ErrUnauthorized) - } - - rawPayloadBase64, ok := data.GetOk("payload") - if !ok { - return nil, nil, fmt.Errorf("missing required field 'payload': %w", ErrMalformed) - } - payloadBase64 := rawPayloadBase64.(string) - - rawSignatureBase64, ok := data.GetOk("signature") - if !ok { - return nil, nil, fmt.Errorf("missing required field 'signature': %w", ErrMalformed) - } - signatureBase64 := rawSignatureBase64.(string) - - // go-jose only seems to support compact signature encodings. 
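ParseRequestParams enforces two request-integrity properties: the protected header's "url" must match the URL Vault computes from the cluster base URL and the request path (RFC 8555 Section 6.4), and the flattened protected/payload/signature fields must be stitched back into a compact serialization before go-jose can verify them (the reassembly appears in the code that follows). A condensed sketch of both steps, with illustrative names rather than Vault APIs:

```go
// Condensed sketch of the URL-integrity check and compact reassembly
// described above; checkRequestURL and compactJWS are illustrative names.
package acmesketch

import (
	"fmt"
	"net/url"
)

func checkRequestURL(clusterURL *url.URL, reqPath, headerURL string) error {
	if headerURL == "" {
		return fmt.Errorf("missing required parameter 'url' in 'protected'")
	}
	expected := clusterURL.JoinPath(reqPath).String()
	if expected != headerURL {
		return fmt.Errorf("invalid 'url': got %q expected %q", headerURL, expected)
	}
	return nil
}

// compactJWS rebuilds the three-part compact serialization from the
// base64url fields of an ACME flattened-JSON request body.
func compactJWS(protected, payload, signature string) string {
	return fmt.Sprintf("%s.%s.%s", protected, payload, signature)
}
```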
- compactSig := fmt.Sprintf("%v.%v.%v", jwkBase64, payloadBase64, signatureBase64) - m, err = c.VerifyJWS(compactSig) - if err != nil { - return nil, nil, fmt.Errorf("failed to verify signature: %w", err) - } - - return &c, m, nil -} - -func (a *acmeState) LoadOrder(ac *acmeContext, userCtx *jwsCtx, orderId string) (*acmeOrder, error) { - path := getOrderPath(userCtx.Kid, orderId) - entry, err := ac.sc.Storage.Get(ac.sc.Context, path) - if err != nil { - return nil, fmt.Errorf("error loading order: %w", err) - } - - if entry == nil { - return nil, fmt.Errorf("order does not exist: %w", ErrMalformed) - } - - var order acmeOrder - err = entry.DecodeJSON(&order) - if err != nil { - return nil, fmt.Errorf("error decoding order: %w", err) - } - - if userCtx.Kid != order.AccountId { - return nil, ErrUnauthorized - } - - order.OrderId = orderId - - return &order, nil -} - -func (a *acmeState) SaveOrder(ac *acmeContext, order *acmeOrder) error { - if order.OrderId == "" { - return fmt.Errorf("invalid order, missing order id") - } - - if order.AccountId == "" { - return fmt.Errorf("invalid order, missing account id") - } - path := getOrderPath(order.AccountId, order.OrderId) - json, err := logical.StorageEntryJSON(path, order) - if err != nil { - return fmt.Errorf("error serializing order entry: %w", err) - } - - if err = ac.sc.Storage.Put(ac.sc.Context, json); err != nil { - return fmt.Errorf("error writing order entry: %w", err) - } - - return nil -} - -func (a *acmeState) ListOrderIds(ac *acmeContext, accountId string) ([]string, error) { - accountOrderPrefixPath := acmeAccountPrefix + accountId + "/orders/" - - rawOrderIds, err := ac.sc.Storage.List(ac.sc.Context, accountOrderPrefixPath) - if err != nil { - return nil, fmt.Errorf("failed listing order ids for account %s: %w", accountId, err) - } - - orderIds := []string{} - for _, order := range rawOrderIds { - if strings.HasSuffix(order, "/") { - // skip any folders we might have for some reason - continue - } - orderIds = append(orderIds, order) - } - return orderIds, nil -} - -type acmeCertEntry struct { - Serial string `json:"-"` - Account string `json:"-"` - Order string `json:"order"` -} - -func (a *acmeState) TrackIssuedCert(ac *acmeContext, accountId string, serial string, orderId string) error { - path := getAcmeSerialToAccountTrackerPath(accountId, serial) - entry := acmeCertEntry{ - Order: orderId, - } - - json, err := logical.StorageEntryJSON(path, &entry) - if err != nil { - return fmt.Errorf("error serializing acme cert entry: %w", err) - } - - if err = ac.sc.Storage.Put(ac.sc.Context, json); err != nil { - return fmt.Errorf("error writing acme cert entry: %w", err) - } - - return nil -} - -func (a *acmeState) GetIssuedCert(ac *acmeContext, accountId string, serial string) (*acmeCertEntry, error) { - path := acmeAccountPrefix + accountId + "/certs/" + normalizeSerial(serial) - - entry, err := ac.sc.Storage.Get(ac.sc.Context, path) - if err != nil { - return nil, fmt.Errorf("error loading acme cert entry: %w", err) - } - - if entry == nil { - return nil, fmt.Errorf("no certificate with this serial was issued for this account") - } - - var cert acmeCertEntry - err = entry.DecodeJSON(&cert) - if err != nil { - return nil, fmt.Errorf("error decoding acme cert entry: %w", err) - } - - cert.Serial = denormalizeSerial(serial) - cert.Account = accountId - - return &cert, nil -} - -func (a *acmeState) SaveEab(sc *storageContext, eab *eabType) error { - json, err := logical.StorageEntryJSON(path.Join(acmeEabPrefix, eab.KeyID), eab) - if err 
!= nil { - return err - } - return sc.Storage.Put(sc.Context, json) -} - -func (a *acmeState) LoadEab(sc *storageContext, eabKid string) (*eabType, error) { - rawEntry, err := sc.Storage.Get(sc.Context, path.Join(acmeEabPrefix, eabKid)) - if err != nil { - return nil, err - } - if rawEntry == nil { - return nil, fmt.Errorf("%w: no eab found for kid %s", ErrStorageItemNotFound, eabKid) - } - - var eab eabType - err = rawEntry.DecodeJSON(&eab) - if err != nil { - return nil, err - } - - eab.KeyID = eabKid - return &eab, nil -} - -func (a *acmeState) DeleteEab(sc *storageContext, eabKid string) (bool, error) { - rawEntry, err := sc.Storage.Get(sc.Context, path.Join(acmeEabPrefix, eabKid)) - if err != nil { - return false, err - } - if rawEntry == nil { - return false, nil - } - - err = sc.Storage.Delete(sc.Context, path.Join(acmeEabPrefix, eabKid)) - if err != nil { - return false, err - } - return true, nil -} - -func (a *acmeState) ListEabIds(sc *storageContext) ([]string, error) { - entries, err := sc.Storage.List(sc.Context, acmeEabPrefix) - if err != nil { - return nil, err - } - var ids []string - for _, entry := range entries { - if strings.HasSuffix(entry, "/") { - continue - } - ids = append(ids, entry) - } - - return ids, nil -} - -func getAcmeSerialToAccountTrackerPath(accountId string, serial string) string { - return acmeAccountPrefix + accountId + "/certs/" + normalizeSerial(serial) -} - -func getAuthorizationPath(accountId string, authId string) string { - return acmeAccountPrefix + accountId + "/authorizations/" + authId -} - -func getOrderPath(accountId string, orderId string) string { - return acmeAccountPrefix + accountId + "/orders/" + orderId -} - -func getACMEToken() (string, error) { - return generateRandomBase64(tokenBytes) -} diff --git a/builtin/logical/pki/acme_state_test.go b/builtin/logical/pki/acme_state_test.go deleted file mode 100644 index 8d4f12127ab207..00000000000000 --- a/builtin/logical/pki/acme_state_test.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestAcmeNonces(t *testing.T) { - t.Parallel() - - a := NewACMEState() - a.nonces.Initialize() - - // Simple operation should succeed. - nonce, _, err := a.GetNonce() - require.NoError(t, err) - require.NotEmpty(t, nonce) - - require.True(t, a.RedeemNonce(nonce)) - require.False(t, a.RedeemNonce(nonce)) - - // Redeeming in opposite order should work. - var nonces []string - for i := 0; i < len(nonce); i++ { - nonce, _, err = a.GetNonce() - require.NoError(t, err) - require.NotEmpty(t, nonce) - } - - for i := len(nonces) - 1; i >= 0; i-- { - nonce = nonces[i] - require.True(t, a.RedeemNonce(nonce)) - } - - for i := 0; i < len(nonces); i++ { - nonce = nonces[i] - require.False(t, a.RedeemNonce(nonce)) - } -} diff --git a/builtin/logical/pki/acme_wrappers.go b/builtin/logical/pki/acme_wrappers.go deleted file mode 100644 index ef83bbce2c553c..00000000000000 --- a/builtin/logical/pki/acme_wrappers.go +++ /dev/null @@ -1,467 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
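TestAcmeNonces above covers the single-redemption guarantee, but its second loop iterates over len(nonce) (the length of a nonce string) and never appends to the nonces slice, so the reverse-order and double-redeem assertions run over an empty slice. A self-contained sketch of what that section appears to intend, using only the helpers shown in this file:

```go
// Sketch of the reverse-order redemption check, under the reading that the
// deleted test meant to collect a batch of fresh nonces before redeeming.
package pki

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestAcmeNoncesReverseOrder(t *testing.T) {
	t.Parallel()

	a := NewACMEState()
	require.NoError(t, a.nonces.Initialize())

	const count = 16
	nonces := make([]string, 0, count)
	for i := 0; i < count; i++ {
		nonce, _, err := a.GetNonce()
		require.NoError(t, err)
		require.NotEmpty(t, nonce)
		nonces = append(nonces, nonce)
	}

	// Redeeming in the opposite order of issuance should still succeed once...
	for i := len(nonces) - 1; i >= 0; i-- {
		require.True(t, a.RedeemNonce(nonces[i]))
	}
	// ...and every nonce must be rejected on a second redemption.
	for _, nonce := range nonces {
		require.False(t, a.RedeemNonce(nonce))
	}
}
```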
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "net/url" - "strings" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -type acmeContext struct { - // baseUrl is the combination of the configured cluster local URL and the acmePath up to /acme/ - baseUrl *url.URL - clusterUrl *url.URL - sc *storageContext - role *roleEntry - issuer *issuerEntry - // acmeDirectory is a string that can distinguish the various acme directories we have configured - // if something needs to remain locked into a directory path structure. - acmeDirectory string - eabPolicy EabPolicy -} - -func (c acmeContext) getAcmeState() *acmeState { - return c.sc.Backend.acmeState -} - -type ( - acmeOperation func(acmeCtx *acmeContext, r *logical.Request, _ *framework.FieldData) (*logical.Response, error) - acmeParsedOperation func(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}) (*logical.Response, error) - acmeAccountRequiredOperation func(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}, acct *acmeAccount) (*logical.Response, error) -) - -// acmeErrorWrapper the lowest level wrapper that will translate errors into proper ACME error responses -func acmeErrorWrapper(op framework.OperationFunc) framework.OperationFunc { - return func(ctx context.Context, r *logical.Request, data *framework.FieldData) (*logical.Response, error) { - resp, err := op(ctx, r, data) - if err != nil { - return TranslateError(err) - } - - return resp, nil - } -} - -// acmeWrapper a basic wrapper that all ACME handlers should leverage as the basis. -// This will create a basic ACME context, validate basic ACME configuration is setup -// for operations. This pulls in acmeErrorWrapper to translate error messages for users, -// but does not enforce any sort of ACME authentication. -func (b *backend) acmeWrapper(op acmeOperation) framework.OperationFunc { - return acmeErrorWrapper(func(ctx context.Context, r *logical.Request, data *framework.FieldData) (*logical.Response, error) { - sc := b.makeStorageContext(ctx, r.Storage) - - config, err := sc.Backend.acmeState.getConfigWithUpdate(sc) - if err != nil { - return nil, fmt.Errorf("failed to fetch ACME configuration: %w", err) - } - - // use string form in case someone messes up our config from raw storage. - eabPolicy, err := getEabPolicyByString(string(config.EabPolicyName)) - if err != nil { - return nil, err - } - - if isAcmeDisabled(sc, config, eabPolicy) { - return nil, ErrAcmeDisabled - } - - if b.useLegacyBundleCaStorage() { - return nil, fmt.Errorf("%w: Can not perform ACME operations until migration has completed", ErrServerInternal) - } - - acmeBaseUrl, clusterBase, err := getAcmeBaseUrl(sc, r) - if err != nil { - return nil, err - } - - role, issuer, err := getAcmeRoleAndIssuer(sc, data, config) - if err != nil { - return nil, err - } - - acmeDirectory, err := getAcmeDirectory(r) - if err != nil { - return nil, err - } - - acmeCtx := &acmeContext{ - baseUrl: acmeBaseUrl, - clusterUrl: clusterBase, - sc: sc, - role: role, - issuer: issuer, - acmeDirectory: acmeDirectory, - eabPolicy: eabPolicy, - } - - return op(acmeCtx, r, data) - }) -} - -// acmeParsedWrapper is an ACME wrapper that will parse out the ACME request parameters, validate -// that we have a proper signature and pass to the operation a decoded map of arguments received. 
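acme_wrappers.go builds each handler by nesting decorators: acmeErrorWrapper translates errors into ACME problem responses, acmeWrapper establishes the acmeContext (config, role, issuer, EAB policy), acmeParsedWrapper parses and verifies the JWS and fixes up response headers (continued in the next hunk), and acmeAccountRequiredWrapper additionally requires a valid existing account. A generic sketch of that composition pattern, with illustrative names only (this is not the Vault framework API):

```go
// Generic sketch of the handler-decorator layering used by the ACME wrappers.
package acmesketch

import (
	"context"
	"fmt"
)

type handler func(ctx context.Context, req string) (string, error)

// withErrorTranslation mirrors acmeErrorWrapper: convert errors into a
// client-facing response instead of propagating them raw.
func withErrorTranslation(next handler) handler {
	return func(ctx context.Context, req string) (string, error) {
		resp, err := next(ctx, req)
		if err != nil {
			return fmt.Sprintf("acme error response: %v", err), nil
		}
		return resp, nil
	}
}

// withAuth mirrors acmeAccountRequiredWrapper: refuse to run the inner
// operation unless the request maps to a known, valid account.
func withAuth(validAccounts map[string]bool, next handler) handler {
	return func(ctx context.Context, req string) (string, error) {
		if !validAccounts[req] {
			return "", fmt.Errorf("unauthorized account %q", req)
		}
		return next(ctx, req)
	}
}

// Composition reads inside-out, with the error translator outermost, just as
// acmeWrapper wraps its body in acmeErrorWrapper.
func buildHandler(valid map[string]bool, op handler) handler {
	return withErrorTranslation(withAuth(valid, op))
}
```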
-// This wrapper builds on top of acmeWrapper. Note that this does perform signature verification -// it does not enforce the account being in a valid state nor existing. -func (b *backend) acmeParsedWrapper(op acmeParsedOperation) framework.OperationFunc { - return b.acmeWrapper(func(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData) (*logical.Response, error) { - user, data, err := b.acmeState.ParseRequestParams(acmeCtx, r, fields) - if err != nil { - return nil, err - } - - resp, err := op(acmeCtx, r, fields, user, data) - - // Our response handlers might not add the necessary headers. - if resp != nil { - if resp.Headers == nil { - resp.Headers = map[string][]string{} - } - - if _, ok := resp.Headers["Replay-Nonce"]; !ok { - nonce, _, err := b.acmeState.GetNonce() - if err != nil { - return nil, err - } - - resp.Headers["Replay-Nonce"] = []string{nonce} - } - - if _, ok := resp.Headers["Link"]; !ok { - resp.Headers["Link"] = genAcmeLinkHeader(acmeCtx) - } else { - directory := genAcmeLinkHeader(acmeCtx)[0] - addDirectory := true - for _, item := range resp.Headers["Link"] { - if item == directory { - addDirectory = false - break - } - } - if addDirectory { - resp.Headers["Link"] = append(resp.Headers["Link"], directory) - } - } - - // ACME responses don't understand Vault's default encoding - // format. Rather than expecting everything to handle creating - // ACME-formatted responses, do the marshaling in one place. - if _, ok := resp.Data[logical.HTTPRawBody]; !ok { - ignored_values := map[string]bool{logical.HTTPContentType: true, logical.HTTPStatusCode: true} - fields := map[string]interface{}{} - body := map[string]interface{}{ - logical.HTTPContentType: "application/json", - logical.HTTPStatusCode: http.StatusOK, - } - - for key, value := range resp.Data { - if _, present := ignored_values[key]; !present { - fields[key] = value - } else { - body[key] = value - } - } - - rawBody, err := json.Marshal(fields) - if err != nil { - return nil, fmt.Errorf("Error marshaling JSON body: %w", err) - } - - body[logical.HTTPRawBody] = rawBody - resp.Data = body - } - } - - return resp, err - }) -} - -// acmeAccountRequiredWrapper builds on top of acmeParsedWrapper, enforcing the -// request has a proper signature for an existing account, and that account is -// in a valid status. It passes to the operation a decoded form of the request -// parameters as well as the ACME account the request is for. -func (b *backend) acmeAccountRequiredWrapper(op acmeAccountRequiredOperation) framework.OperationFunc { - return b.acmeParsedWrapper(func(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, uc *jwsCtx, data map[string]interface{}) (*logical.Response, error) { - if !uc.Existing { - return nil, fmt.Errorf("cannot process request without a 'kid': %w", ErrMalformed) - } - - account, err := requireValidAcmeAccount(acmeCtx, uc) - if err != nil { - return nil, err - } - - return op(acmeCtx, r, fields, uc, data, account) - }) -} - -func requireValidAcmeAccount(acmeCtx *acmeContext, uc *jwsCtx) (*acmeAccount, error) { - account, err := acmeCtx.getAcmeState().LoadAccount(acmeCtx, uc.Kid) - if err != nil { - return nil, fmt.Errorf("error loading account: %w", err) - } - - if err = acmeCtx.eabPolicy.EnforceForExistingAccount(account); err != nil { - return nil, err - } - - if account.Status != AccountStatusValid { - // Treating "revoked" and "deactivated" as the same here. 
- return nil, fmt.Errorf("%w: account in status: %s", ErrUnauthorized, account.Status) - } - return account, nil -} - -// A helper function that will build up the various path patterns we want for ACME APIs. -func buildAcmeFrameworkPaths(b *backend, patternFunc func(b *backend, pattern string) *framework.Path, acmeApi string) []*framework.Path { - var patterns []*framework.Path - for _, baseUrl := range []string{ - "acme", - "roles/" + framework.GenericNameRegex("role") + "/acme", - "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/acme", - "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/roles/" + framework.GenericNameRegex("role") + "/acme", - } { - - if !strings.HasPrefix(acmeApi, "/") { - acmeApi = "/" + acmeApi - } - - path := patternFunc(b, baseUrl+acmeApi) - patterns = append(patterns, path) - } - - return patterns -} - -func getAcmeBaseUrl(sc *storageContext, r *logical.Request) (*url.URL, *url.URL, error) { - baseUrl, err := getBasePathFromClusterConfig(sc) - if err != nil { - return nil, nil, err - } - - directoryPrefix, err := getAcmeDirectory(r) - if err != nil { - return nil, nil, err - } - - return baseUrl.JoinPath(directoryPrefix), baseUrl, nil -} - -func getBasePathFromClusterConfig(sc *storageContext) (*url.URL, error) { - cfg, err := sc.getClusterConfig() - if err != nil { - return nil, fmt.Errorf("failed loading cluster config: %w", err) - } - - if cfg.Path == "" { - return nil, fmt.Errorf("ACME feature requires local cluster 'path' field configuration to be set") - } - - baseUrl, err := url.Parse(cfg.Path) - if err != nil { - return nil, fmt.Errorf("failed parsing URL configured in local cluster 'path' configuration: %s: %s", - cfg.Path, err.Error()) - } - return baseUrl, nil -} - -func getAcmeIssuer(sc *storageContext, issuerName string) (*issuerEntry, error) { - if issuerName == "" { - issuerName = defaultRef - } - issuerId, err := sc.resolveIssuerReference(issuerName) - if err != nil { - return nil, fmt.Errorf("%w: issuer does not exist", ErrMalformed) - } - - issuer, err := sc.fetchIssuerById(issuerId) - if err != nil { - return nil, fmt.Errorf("issuer failed to load: %w", err) - } - - if issuer.Usage.HasUsage(IssuanceUsage) && len(issuer.KeyID) > 0 { - return issuer, nil - } - - return nil, fmt.Errorf("%w: issuer missing proper issuance usage or key", ErrServerInternal) -} - -// getAcmeDirectory return the base acme directory path, without a leading '/' and including -// the trailing /acme/ folder which is the root of all our various directories -func getAcmeDirectory(r *logical.Request) (string, error) { - acmePath := r.Path - if !strings.HasPrefix(acmePath, "/") { - acmePath = "/" + acmePath - } - - lastIndex := strings.LastIndex(acmePath, "/acme/") - if lastIndex == -1 { - return "", fmt.Errorf("%w: unable to determine acme base folder path: %s", ErrServerInternal, acmePath) - } - - // Skip the leading '/' and return our base path with the /acme/ - return strings.TrimLeft(acmePath[0:lastIndex]+"/acme/", "/"), nil -} - -func getAcmeRoleAndIssuer(sc *storageContext, data *framework.FieldData, config *acmeConfigEntry) (*roleEntry, *issuerEntry, error) { - requestedIssuer := getRequestedAcmeIssuerFromPath(data) - requestedRole := getRequestedAcmeRoleFromPath(data) - issuerToLoad := requestedIssuer - - var role *roleEntry - var err error - - if len(requestedRole) == 0 { // Default Directory - policyType, err := getDefaultDirectoryPolicyType(config.DefaultDirectoryPolicy) - if err != nil { - return nil, nil, err - } - switch policyType { - case 
Forbid: - return nil, nil, fmt.Errorf("%w: default directory not allowed by ACME policy", ErrServerInternal) - case SignVerbatim: - role = buildSignVerbatimRoleWithNoData(&roleEntry{ - Issuer: requestedIssuer, - NoStore: false, - Name: requestedRole, - }) - case Role: - defaultRole, err := getDefaultDirectoryPolicyRole(config.DefaultDirectoryPolicy) - if err != nil { - return nil, nil, err - } - role, err = getAndValidateAcmeRole(sc, defaultRole) - if err != nil { - return nil, nil, err - } - } - } else { // Requested Role - role, err = getAndValidateAcmeRole(sc, requestedRole) - if err != nil { - return nil, nil, err - } - - // Check the Requested Role is Allowed - allowAnyRole := len(config.AllowedRoles) == 1 && config.AllowedRoles[0] == "*" - if !allowAnyRole { - - var foundRole bool - for _, name := range config.AllowedRoles { - if name == role.Name { - foundRole = true - break - } - } - - if !foundRole { - return nil, nil, fmt.Errorf("%w: specified role not allowed by ACME policy", ErrServerInternal) - } - } - - } - - // If we haven't loaded an issuer directly from our path and the specified (or default) - // role does specify an issuer prefer the role's issuer rather than the default issuer. - if len(role.Issuer) > 0 && len(requestedIssuer) == 0 { - issuerToLoad = role.Issuer - } - - issuer, err := getAcmeIssuer(sc, issuerToLoad) - if err != nil { - return nil, nil, err - } - - allowAnyIssuer := len(config.AllowedIssuers) == 1 && config.AllowedIssuers[0] == "*" - if !allowAnyIssuer { - var foundIssuer bool - for index, name := range config.AllowedIssuers { - candidateId, err := sc.resolveIssuerReference(name) - if err != nil { - return nil, nil, fmt.Errorf("failed to resolve reference for allowed_issuer entry %d: %w", index, err) - } - - if candidateId == issuer.ID { - foundIssuer = true - break - } - } - - if !foundIssuer { - return nil, nil, fmt.Errorf("%w: specified issuer not allowed by ACME policy", ErrServerInternal) - } - } - - // Override ExtKeyUsage behavior to force it to only be ServerAuth within ACME issued certs - role.ExtKeyUsage = []string{"serverauth"} - role.ExtKeyUsageOIDs = []string{} - role.ServerFlag = true - role.ClientFlag = false - role.CodeSigningFlag = false - role.EmailProtectionFlag = false - - return role, issuer, nil -} - -func getAndValidateAcmeRole(sc *storageContext, requestedRole string) (*roleEntry, error) { - var err error - role, err := sc.Backend.getRole(sc.Context, sc.Storage, requestedRole) - if err != nil { - return nil, fmt.Errorf("%w: err loading role", ErrServerInternal) - } - - if role == nil { - return nil, fmt.Errorf("%w: role does not exist", ErrMalformed) - } - - if role.NoStore { - return nil, fmt.Errorf("%w: role can not be used as NoStore is set to true", ErrServerInternal) - } - - return role, nil -} - -func getRequestedAcmeRoleFromPath(data *framework.FieldData) string { - requestedRole := "" - roleNameRaw, present := data.GetOk("role") - if present { - requestedRole = roleNameRaw.(string) - } - return requestedRole -} - -func getRequestedAcmeIssuerFromPath(data *framework.FieldData) string { - requestedIssuer := "" - requestedIssuerRaw, present := data.GetOk(issuerRefParam) - if present { - requestedIssuer = requestedIssuerRaw.(string) - } - return requestedIssuer -} - -func isAcmeDisabled(sc *storageContext, config *acmeConfigEntry, policy EabPolicy) bool { - if !config.Enabled { - return true - } - - disableAcme, nonFatalErr := isPublicACMEDisabledByEnv() - if nonFatalErr != nil { - sc.Backend.Logger().Warn(fmt.Sprintf("could 
not parse env var '%s'", disableAcmeEnvVar), "error", nonFatalErr) - } - - // The OS environment if true will override any configuration option. - if disableAcme { - if policy.OverrideEnvDisablingPublicAcme() { - return false - } - return true - } - - return false -} diff --git a/builtin/logical/pki/acme_wrappers_test.go b/builtin/logical/pki/acme_wrappers_test.go deleted file mode 100644 index 4182066495c4fb..00000000000000 --- a/builtin/logical/pki/acme_wrappers_test.go +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "fmt" - "strings" - "testing" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" - "github.com/stretchr/testify/require" -) - -// TestACMEIssuerRoleLoading validates the role and issuer loading logic within the base -// ACME wrapper is correct. -func TestACMEIssuerRoleLoading(t *testing.T) { - b, s := CreateBackendWithStorage(t) - - _, err := CBWrite(b, s, "config/cluster", map[string]interface{}{ - "path": "http://localhost:8200/v1/pki", - "aia_path": "http://localhost:8200/cdn/pki", - }) - require.NoError(t, err) - - _, err = CBWrite(b, s, "config/acme", map[string]interface{}{ - "enabled": true, - }) - require.NoError(t, err) - - _, err = CBWrite(b, s, "root/generate/internal", map[string]interface{}{ - "common_name": "myvault1.com", - "issuer_name": "issuer-1", - "key_type": "ec", - }) - require.NoError(t, err, "failed creating issuer issuer-1") - - _, err = CBWrite(b, s, "root/generate/internal", map[string]interface{}{ - "common_name": "myvault2.com", - "issuer_name": "issuer-2", - "key_type": "ec", - }) - require.NoError(t, err, "failed creating issuer issuer-2") - - _, err = CBWrite(b, s, "roles/role-bad-issuer", map[string]interface{}{ - issuerRefParam: "non-existant", - "no_store": "false", - }) - require.NoError(t, err, "failed creating role role-bad-issuer") - - _, err = CBWrite(b, s, "roles/role-no-store-enabled", map[string]interface{}{ - issuerRefParam: "issuer-2", - "no_store": "true", - }) - require.NoError(t, err, "failed creating role role-no-store-enabled") - - _, err = CBWrite(b, s, "roles/role-issuer-2", map[string]interface{}{ - issuerRefParam: "issuer-2", - "no_store": "false", - }) - require.NoError(t, err, "failed creating role role-issuer-2") - - tc := []struct { - name string - roleName string - issuerName string - expectedIssuerName string - expectErr bool - }{ - {name: "pass-default-use-default", roleName: "", issuerName: "", expectedIssuerName: "issuer-1", expectErr: false}, - {name: "pass-role-issuer-2", roleName: "role-issuer-2", issuerName: "", expectedIssuerName: "issuer-2", expectErr: false}, - {name: "pass-issuer-1-no-role", roleName: "", issuerName: "issuer-1", expectedIssuerName: "issuer-1", expectErr: false}, - {name: "fail-role-has-bad-issuer", roleName: "role-bad-issuer", issuerName: "", expectedIssuerName: "", expectErr: true}, - {name: "fail-role-no-store-enabled", roleName: "role-no-store-enabled", issuerName: "", expectedIssuerName: "", expectErr: true}, - {name: "fail-role-no-store-enabled", roleName: "role-no-store-enabled", issuerName: "", expectedIssuerName: "", expectErr: true}, - {name: "fail-role-does-not-exist", roleName: "non-existant", issuerName: "", expectedIssuerName: "", expectErr: true}, - {name: "fail-issuer-does-not-exist", roleName: "", issuerName: "non-existant", expectedIssuerName: "", expectErr: true}, - } - - for _, tt := range tc { - t.Run(tt.name, func(t *testing.T) { - f 
:= b.acmeWrapper(func(acmeCtx *acmeContext, r *logical.Request, _ *framework.FieldData) (*logical.Response, error) { - if tt.roleName != acmeCtx.role.Name { - return nil, fmt.Errorf("expected role %s but got %s", tt.roleName, acmeCtx.role.Name) - } - - if tt.expectedIssuerName != acmeCtx.issuer.Name { - return nil, fmt.Errorf("expected issuer %s but got %s", tt.expectedIssuerName, acmeCtx.issuer.Name) - } - - return nil, nil - }) - - var acmePath string - fieldRaw := map[string]interface{}{} - if tt.issuerName != "" { - fieldRaw[issuerRefParam] = tt.issuerName - acmePath = "issuer/" + tt.issuerName + "/" - } - if tt.roleName != "" { - fieldRaw["role"] = tt.roleName - acmePath = acmePath + "roles/" + tt.roleName + "/" - } - - acmePath = strings.TrimLeft(acmePath+"/acme/directory", "/") - - resp, err := f(context.Background(), &logical.Request{Path: acmePath, Storage: s}, &framework.FieldData{ - Raw: fieldRaw, - Schema: getCsrSignVerbatimSchemaFields(), - }) - require.NoError(t, err, "all errors should be re-encoded") - - if tt.expectErr { - require.NotEqual(t, 200, resp.Data[logical.HTTPStatusCode]) - require.Equal(t, ErrorContentType, resp.Data[logical.HTTPContentType]) - } else { - if resp != nil { - t.Fatalf("expected no error got %s", string(resp.Data[logical.HTTPRawBody].([]uint8))) - } - } - }) - } -} diff --git a/builtin/logical/pki/backend.go b/builtin/logical/pki/backend.go index 8caf2f6a2db90a..d54c2bfbcb5af2 100644 --- a/builtin/logical/pki/backend.go +++ b/builtin/logical/pki/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -12,24 +9,22 @@ import ( "sync/atomic" "time" + "github.com/hashicorp/vault/helper/constants" + + "github.com/hashicorp/go-multierror" + atomic2 "go.uber.org/atomic" + "github.com/hashicorp/vault/sdk/helper/consts" + "github.com/armon/go-metrics" - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/vault/helper/constants" "github.com/hashicorp/vault/helper/metricsutil" "github.com/hashicorp/vault/helper/namespace" "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/logical" ) const ( - operationPrefixPKI = "pki" - operationPrefixPKIIssuer = "pki-issuer" - operationPrefixPKIIssuers = "pki-issuers" - operationPrefixPKIRoot = "pki-root" - noRole = 0 roleOptional = 1 roleRequired = 2 @@ -115,8 +110,6 @@ func Backend(conf *logical.BackendConfig) *backend { "unified-crl", "unified-ocsp", // Unified OCSP POST "unified-ocsp/*", // Unified OCSP GET - - // ACME paths are added below }, LocalStorage: []string{ @@ -126,7 +119,6 @@ func Backend(conf *logical.BackendConfig) *backend { clusterConfigPath, "crls/", "certs/", - acmePathPrefix, }, Root: []string{ @@ -215,11 +207,6 @@ func Backend(conf *logical.BackendConfig) *backend { // CRL Signing pathResignCrls(&b), pathSignRevocationList(&b), - - // ACME - pathAcmeConfig(&b), - pathAcmeEabList(&b), - pathAcmeEabDelete(&b), }, Secrets: []*framework.Secret{ @@ -230,45 +217,6 @@ func Backend(conf *logical.BackendConfig) *backend { InitializeFunc: b.initialize, Invalidate: b.invalidate, PeriodicFunc: b.periodicFunc, - Clean: b.cleanup, - } - - // Add ACME paths to backend - var acmePaths []*framework.Path - acmePaths = append(acmePaths, pathAcmeDirectory(&b)...) - acmePaths = append(acmePaths, pathAcmeNonce(&b)...) - acmePaths = append(acmePaths, pathAcmeNewAccount(&b)...) - acmePaths = append(acmePaths, pathAcmeUpdateAccount(&b)...) 
- acmePaths = append(acmePaths, pathAcmeGetOrder(&b)...) - acmePaths = append(acmePaths, pathAcmeListOrders(&b)...) - acmePaths = append(acmePaths, pathAcmeNewOrder(&b)...) - acmePaths = append(acmePaths, pathAcmeFinalizeOrder(&b)...) - acmePaths = append(acmePaths, pathAcmeFetchOrderCert(&b)...) - acmePaths = append(acmePaths, pathAcmeChallenge(&b)...) - acmePaths = append(acmePaths, pathAcmeAuthorization(&b)...) - acmePaths = append(acmePaths, pathAcmeRevoke(&b)...) - acmePaths = append(acmePaths, pathAcmeNewEab(&b)...) // auth'd API that lives underneath the various /acme paths - - for _, acmePath := range acmePaths { - b.Backend.Paths = append(b.Backend.Paths, acmePath) - } - - // Add specific un-auth'd paths for ACME APIs - for _, acmePrefix := range []string{"", "issuer/+/", "roles/+/", "issuer/+/roles/+/"} { - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/directory") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/new-nonce") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/new-account") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/new-order") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/revoke-cert") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/key-change") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/account/+") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/authorization/+") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/challenge/+/+") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/orders") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/order/+") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/order/+/finalize") - b.PathsSpecial.Unauthenticated = append(b.PathsSpecial.Unauthenticated, acmePrefix+"acme/order/+/cert") - // We specifically do NOT add acme/new-eab to this as it should be auth'd } if constants.IsEnterprise { @@ -303,18 +251,14 @@ func Backend(conf *logical.BackendConfig) *backend { b.lastTidy = time.Now() // Metrics initialization for count of certificates in storage - b.certCountEnabled = atomic2.NewBool(false) - b.publishCertCountMetrics = atomic2.NewBool(false) b.certsCounted = atomic2.NewBool(false) - b.certCountError = "Initialize Not Yet Run, Cert Counts Unavailable" - b.certCount = &atomic.Uint32{} - b.revokedCertCount = &atomic.Uint32{} + b.certCount = new(uint32) + b.revokedCertCount = new(uint32) b.possibleDoubleCountedSerials = make([]string, 0, 250) b.possibleDoubleCountedRevokedSerials = make([]string, 0, 250) b.unifiedTransferStatus = newUnifiedTransferStatus() - b.acmeState = NewACMEState() return &b } @@ -333,12 +277,9 @@ type backend struct { unifiedTransferStatus *unifiedTransferStatus - certCountEnabled *atomic2.Bool - publishCertCountMetrics *atomic2.Bool - certCount *atomic.Uint32 - revokedCertCount *atomic.Uint32 + certCount *uint32 + revokedCertCount *uint32 certsCounted *atomic2.Bool - certCountError string possibleDoubleCountedSerials []string possibleDoubleCountedRevokedSerials []string @@ -347,11 +288,6 @@ type backend struct { // Write lock around issuers and keys. 
issuersLock sync.RWMutex - - // Context around ACME operations - acmeState *acmeState - acmeAccountLock sync.RWMutex // (Write) Locked on Tidy, (Read) Locked on Account Creation - // TODO: Stress test this - eg. creating an order while an account is being revoked } type roleOperation func(ctx context.Context, req *logical.Request, data *framework.FieldData, role *roleEntry) (*logical.Response, error) @@ -432,26 +368,15 @@ func (b *backend) initialize(ctx context.Context, _ *logical.InitializationReque return err } - err = b.acmeState.Initialize(b, sc) - if err != nil { - return err - } - // Initialize also needs to populate our certificate and revoked certificate count err = b.initializeStoredCertificateCounts(ctx) if err != nil { - // Don't block/err initialize/startup for metrics. Context on this call can time out due to number of certificates. - b.Logger().Error("Could not initialize stored certificate counts", "error", err) - b.certCountError = err.Error() + return err } return nil } -func (b *backend) cleanup(_ context.Context) { - b.acmeState.validator.Closing <- struct{}{} -} - func (b *backend) initializePKIIssuersStorage(ctx context.Context) error { // Grab the lock prior to the updating of the storage lock preventing us flipping // the storage flag midway through the request stream of other requests. @@ -548,8 +473,6 @@ func (b *backend) invalidate(ctx context.Context, key string) { case key == "config/crl": // We may need to reload our OCSP status flag b.crlBuilder.markConfigDirty() - case key == storageAcmeConfig: - b.acmeState.markConfigDirty() case key == storageIssuerConfig: b.crlBuilder.invalidateCRLBuildTime() case strings.HasPrefix(key, crossRevocationPrefix): @@ -614,32 +537,16 @@ func (b *backend) periodicFunc(ctx context.Context, request *logical.Request) er } // Then attempt to rebuild the CRLs if required. - warnings, err := b.crlBuilder.rebuildIfForced(sc) - if err != nil { + if err := b.crlBuilder.rebuildIfForced(sc); err != nil { return err } - if len(warnings) > 0 { - msg := "During rebuild of complete CRL, got the following warnings:" - for index, warning := range warnings { - msg = fmt.Sprintf("%v\n %d. %v", msg, index+1, warning) - } - b.Logger().Warn(msg) - } // If a delta CRL was rebuilt above as part of the complete CRL rebuild, // this will be a no-op. However, if we do need to rebuild delta CRLs, // this would cause us to do so. - warnings, err = b.crlBuilder.rebuildDeltaCRLsIfForced(sc, false) - if err != nil { + if err := b.crlBuilder.rebuildDeltaCRLsIfForced(sc, false); err != nil { return err } - if len(warnings) > 0 { - msg := "During rebuild of delta CRL, got the following warnings:" - for index, warning := range warnings { - msg = fmt.Sprintf("%v\n %d. %v", msg, index+1, warning) - } - b.Logger().Warn(msg) - } return nil } @@ -695,24 +602,12 @@ func (b *backend) periodicFunc(ctx context.Context, request *logical.Request) er return nil } - // First tidy any ACME nonces to free memory. - b.acmeState.DoTidyNonces() - - // Then run unified transfer. backgroundSc := b.makeStorageContext(context.Background(), b.storage) go runUnifiedTransfer(backgroundSc) - // Then run the CRL rebuild and tidy operation. 
crlErr := doCRL() tidyErr := doAutoTidy() - // Periodically re-emit gauges so that they don't disappear/go stale - tidyConfig, err := sc.getAutoTidyConfig() - if err != nil { - return err - } - b.emitCertStoreMetrics(tidyConfig) - var errors error if crlErr != nil { errors = multierror.Append(errors, fmt.Errorf("Error building CRLs:\n - %w\n", crlErr)) @@ -737,51 +632,24 @@ func (b *backend) periodicFunc(ctx context.Context, request *logical.Request) er } func (b *backend) initializeStoredCertificateCounts(ctx context.Context) error { + b.tidyStatusLock.RLock() + defer b.tidyStatusLock.RUnlock() // For performance reasons, we can't lock on issuance/storage of certs until a list operation completes, // but we want to limit possible miscounts / double-counts to over-counting, so we take the tidy lock which // prevents (most) deletions - in particular we take a read lock (sufficient to block the write lock in // tidyStatusStart while allowing tidy to still acquire a read lock to report via its endpoint) - b.tidyStatusLock.RLock() - defer b.tidyStatusLock.RUnlock() - sc := b.makeStorageContext(ctx, b.storage) - config, err := sc.getAutoTidyConfig() - if err != nil { - return err - } - - b.certCountEnabled.Store(config.MaintainCount) - b.publishCertCountMetrics.Store(config.PublishMetrics) - - if config.MaintainCount == false { - b.possibleDoubleCountedRevokedSerials = nil - b.possibleDoubleCountedSerials = nil - b.certsCounted.Store(true) - b.certCount.Store(0) - b.revokedCertCount.Store(0) - b.certCountError = "Cert Count is Disabled: enable via Tidy Config maintain_stored_certificate_counts" - return nil - } - - // Ideally these three things would be set in one transaction, since that isn't possible, set the counts to "0", - // first, so count will over-count (and miss putting things in deduplicate queue), rather than under-count. - b.certCount.Store(0) - b.revokedCertCount.Store(0) - b.possibleDoubleCountedRevokedSerials = nil - b.possibleDoubleCountedSerials = nil - // A cert issued or revoked here will be double-counted. That's okay, this is "best effort" metrics. 
- b.certsCounted.Store(false) entries, err := b.storage.List(ctx, "certs/") if err != nil { return err } - b.certCount.Add(uint32(len(entries))) + atomic.AddUint32(b.certCount, uint32(len(entries))) revokedEntries, err := b.storage.List(ctx, "revoked/") if err != nil { return err } - b.revokedCertCount.Add(uint32(len(revokedEntries))) + atomic.AddUint32(b.revokedCertCount, uint32(len(revokedEntries))) b.certsCounted.Store(true) // Now that the metrics are set, we can switch from appending newly-stored certificates to the possible double-count @@ -862,98 +730,64 @@ func (b *backend) initializeStoredCertificateCounts(ctx context.Context) error { b.possibleDoubleCountedRevokedSerials = nil b.possibleDoubleCountedSerials = nil - b.emitCertStoreMetrics(config) - - b.certCountError = "" + certCount := atomic.LoadUint32(b.certCount) + metrics.SetGauge([]string{"secrets", "pki", b.backendUUID, "total_certificates_stored"}, float32(certCount)) + revokedCertCount := atomic.LoadUint32(b.revokedCertCount) + metrics.SetGauge([]string{"secrets", "pki", b.backendUUID, "total_revoked_certificates_stored"}, float32(revokedCertCount)) return nil } -func (b *backend) emitCertStoreMetrics(config *tidyConfig) { - if config.PublishMetrics == true { - certCount := b.certCount.Load() - b.emitTotalCertCountMetric(certCount) - revokedCertCount := b.revokedCertCount.Load() - b.emitTotalRevokedCountMetric(revokedCertCount) - } -} - // The "certsCounted" boolean here should be loaded from the backend certsCounted before the corresponding storage call: // eg. certsCounted := b.certsCounted.Load() -func (b *backend) ifCountEnabledIncrementTotalCertificatesCount(certsCounted bool, newSerial string) { - if b.certCountEnabled.Load() { - certCount := b.certCount.Add(1) - switch { - case !certsCounted: - // This is unsafe, but a good best-attempt - if strings.HasPrefix(newSerial, "certs/") { - newSerial = newSerial[6:] - } - b.possibleDoubleCountedSerials = append(b.possibleDoubleCountedSerials, newSerial) - default: - if b.publishCertCountMetrics.Load() { - b.emitTotalCertCountMetric(certCount) - } - } - } -} - -func (b *backend) ifCountEnabledDecrementTotalCertificatesCountReport() { - if b.certCountEnabled.Load() { - certCount := b.decrementTotalCertificatesCountNoReport() - if b.publishCertCountMetrics.Load() { - b.emitTotalCertCountMetric(certCount) +func (b *backend) incrementTotalCertificatesCount(certsCounted bool, newSerial string) { + certCount := atomic.AddUint32(b.certCount, 1) + switch { + case !certsCounted: + // This is unsafe, but a good best-attempt + if strings.HasPrefix(newSerial, "certs/") { + newSerial = newSerial[6:] } + b.possibleDoubleCountedSerials = append(b.possibleDoubleCountedSerials, newSerial) + default: + metrics.SetGauge([]string{"secrets", "pki", b.backendUUID, "total_certificates_stored"}, float32(certCount)) } } -func (b *backend) emitTotalCertCountMetric(certCount uint32) { +func (b *backend) decrementTotalCertificatesCountReport() { + certCount := b.decrementTotalCertificatesCountNoReport() metrics.SetGauge([]string{"secrets", "pki", b.backendUUID, "total_certificates_stored"}, float32(certCount)) } // Called directly only by the initialize function to deduplicate the count, when we don't have a full count yet -// Does not respect whether-we-are-counting backend information. 
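The decrement helpers just after this point rely on a sync/atomic idiom: there is no SubUint32, so adding ^uint32(0) (all bits set, 2^32-1) wraps around modulo 2^32 and subtracts one, exactly as the AddUint32 documentation suggests. A quick demonstration, covering both the AddUint32 form this backport restores and the atomic.Uint32 wrapper the pre-backport code used:

```go
// Demonstration of the ^uint32(0) decrement idiom used by the helpers below.
package main

import (
	"fmt"
	"sync/atomic"
)

func main() {
	var count uint32 = 5

	atomic.AddUint32(&count, ^uint32(0)) // subtracts one: count == 4
	fmt.Println(atomic.LoadUint32(&count))

	// The same idiom with the atomic.Uint32 wrapper type (Go 1.19+).
	var c atomic.Uint32
	c.Store(5)
	c.Add(^uint32(0)) // c == 4
	fmt.Println(c.Load())
}
```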
func (b *backend) decrementTotalCertificatesCountNoReport() uint32 { - newCount := b.certCount.Add(^uint32(0)) + newCount := atomic.AddUint32(b.certCount, ^uint32(0)) return newCount } // The "certsCounted" boolean here should be loaded from the backend certsCounted before the corresponding storage call: // eg. certsCounted := b.certsCounted.Load() -func (b *backend) ifCountEnabledIncrementTotalRevokedCertificatesCount(certsCounted bool, newSerial string) { - if b.certCountEnabled.Load() { - newRevokedCertCount := b.revokedCertCount.Add(1) - switch { - case !certsCounted: - // This is unsafe, but a good best-attempt - if strings.HasPrefix(newSerial, "revoked/") { // allow passing in the path (revoked/serial) OR the serial - newSerial = newSerial[8:] - } - b.possibleDoubleCountedRevokedSerials = append(b.possibleDoubleCountedRevokedSerials, newSerial) - default: - if b.publishCertCountMetrics.Load() { - b.emitTotalRevokedCountMetric(newRevokedCertCount) - } - } - } -} - -func (b *backend) ifCountEnabledDecrementTotalRevokedCertificatesCountReport() { - if b.certCountEnabled.Load() { - revokedCertCount := b.decrementTotalRevokedCertificatesCountNoReport() - if b.publishCertCountMetrics.Load() { - b.emitTotalRevokedCountMetric(revokedCertCount) +func (b *backend) incrementTotalRevokedCertificatesCount(certsCounted bool, newSerial string) { + newRevokedCertCount := atomic.AddUint32(b.revokedCertCount, 1) + switch { + case !certsCounted: + // This is unsafe, but a good best-attempt + if strings.HasPrefix(newSerial, "revoked/") { // allow passing in the path (revoked/serial) OR the serial + newSerial = newSerial[8:] } + b.possibleDoubleCountedRevokedSerials = append(b.possibleDoubleCountedRevokedSerials, newSerial) + default: + metrics.SetGauge([]string{"secrets", "pki", b.backendUUID, "total_revoked_certificates_stored"}, float32(newRevokedCertCount)) } } -func (b *backend) emitTotalRevokedCountMetric(revokedCertCount uint32) { +func (b *backend) decrementTotalRevokedCertificatesCountReport() { + revokedCertCount := b.decrementTotalRevokedCertificatesCountNoReport() metrics.SetGauge([]string{"secrets", "pki", b.backendUUID, "total_revoked_certificates_stored"}, float32(revokedCertCount)) } // Called directly only by the initialize function to deduplicate the count, when we don't have a full count yet -// Does not respect whether-we-are-counting backend information. func (b *backend) decrementTotalRevokedCertificatesCountNoReport() uint32 { - newRevokedCertCount := b.revokedCertCount.Add(^uint32(0)) + newRevokedCertCount := atomic.AddUint32(b.revokedCertCount, ^uint32(0)) return newRevokedCertCount } diff --git a/builtin/logical/pki/backend_test.go b/builtin/logical/pki/backend_test.go index 6e779199363efb..9eb70617376458 100644 --- a/builtin/logical/pki/backend_test.go +++ b/builtin/logical/pki/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -30,6 +27,7 @@ import ( "strconv" "strings" "sync" + "sync/atomic" "testing" "time" @@ -37,8 +35,6 @@ import ( "github.com/hashicorp/vault/helper/testhelpers" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" - "github.com/stretchr/testify/require" "github.com/armon/go-metrics" @@ -137,10 +133,9 @@ func TestPKI_RequireCN(t *testing.T) { // Issue a cert with require_cn set to true and with common name supplied. // It should succeed. 
- resp, err = CBWrite(b, s, "issue/example", map[string]interface{}{ + _, err = CBWrite(b, s, "issue/example", map[string]interface{}{ "common_name": "foobar.com", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issue/example"), logical.UpdateOperation), resp, true) if err != nil { t.Fatal(err) } @@ -560,6 +555,7 @@ func generateURLSteps(t *testing.T, caCert, caKey string, intdata, reqdata map[s if err != nil { return err } + if !reflect.DeepEqual(entries, expected) { return fmt.Errorf("expected urls\n%#v\ndoes not match provided\n%#v\n", expected, entries) } @@ -1887,7 +1883,6 @@ func TestBackend_PathFetchValidRaw(t *testing.T) { Data: map[string]interface{}{}, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("ca/pem"), logical.ReadOperation), resp, true) require.NoError(t, err) if resp != nil && resp.IsError() { t.Fatalf("failed read ca/pem, %#v", resp) @@ -1993,7 +1988,6 @@ func TestBackend_PathFetchCertList(t *testing.T) { Data: rootData, MountPoint: "pki/", }) - if resp != nil && resp.IsError() { t.Fatalf("failed to generate root, %#v", resp) } @@ -2014,16 +2008,6 @@ func TestBackend_PathFetchCertList(t *testing.T) { Data: urlsData, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/urls"), logical.UpdateOperation), resp, true) - - resp, err = b.HandleRequest(context.Background(), &logical.Request{ - Operation: logical.ReadOperation, - Path: "config/urls", - Storage: storage, - MountPoint: "pki/", - }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/urls"), logical.ReadOperation), resp, true) - if resp != nil && resp.IsError() { t.Fatalf("failed to config urls, %#v", resp) } @@ -2203,8 +2187,6 @@ func runTestSignVerbatim(t *testing.T, keyType string) { Data: signVerbatimData, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("sign-verbatim"), logical.UpdateOperation), resp, true) - if resp != nil && resp.IsError() { t.Fatalf("failed to sign-verbatim basic CSR: %#v", *resp) } @@ -2438,7 +2420,6 @@ func TestBackend_Root_Idempotency(t *testing.T) { // Now because the issued CA's have no links, the call to ca_chain should return the same data (ca chain from default) resp, err = CBRead(b, s, "cert/ca_chain") require.NoError(t, err, "error reading ca_chain: %v", err) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("cert/ca_chain"), logical.ReadOperation), resp, true) r2Data := resp.Data if !reflect.DeepEqual(r1Data, r2Data) { @@ -2450,31 +2431,15 @@ func TestBackend_Root_Idempotency(t *testing.T) { resp, err = CBWrite(b, s, "config/ca", map[string]interface{}{ "pem_bundle": pemBundleRootCA, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/ca"), logical.UpdateOperation), resp, true) - require.NoError(t, err) require.NotNil(t, resp, "expected ca info") - firstMapping := resp.Data["mapping"].(map[string]string) firstImportedKeys := resp.Data["imported_keys"].([]string) firstImportedIssuers := resp.Data["imported_issuers"].([]string) - firstExistingKeys := resp.Data["existing_keys"].([]string) - firstExistingIssuers := resp.Data["existing_issuers"].([]string) require.NotContains(t, firstImportedKeys, keyId1) require.NotContains(t, firstImportedKeys, keyId2) require.NotContains(t, firstImportedIssuers, issuerId1) require.NotContains(t, firstImportedIssuers, issuerId2) - require.Empty(t, firstExistingKeys) - require.Empty(t, firstExistingIssuers) - require.NotEmpty(t, firstMapping) - 
require.Equal(t, 1, len(firstMapping)) - - var issuerId3 string - var keyId3 string - for i, k := range firstMapping { - issuerId3 = i - keyId3 = k - } // Performing this again should result in no key/issuer ids being imported/generated. resp, err = CBWrite(b, s, "config/ca", map[string]interface{}{ @@ -2482,17 +2447,11 @@ func TestBackend_Root_Idempotency(t *testing.T) { }) require.NoError(t, err) require.NotNil(t, resp, "expected ca info") - secondMapping := resp.Data["mapping"].(map[string]string) secondImportedKeys := resp.Data["imported_keys"] secondImportedIssuers := resp.Data["imported_issuers"] - secondExistingKeys := resp.Data["existing_keys"] - secondExistingIssuers := resp.Data["existing_issuers"] - require.Empty(t, secondImportedKeys) - require.Empty(t, secondImportedIssuers) - require.Contains(t, secondExistingKeys, keyId3) - require.Contains(t, secondExistingIssuers, issuerId3) - require.Equal(t, 1, len(secondMapping)) + require.Nil(t, secondImportedKeys) + require.Nil(t, secondImportedIssuers) resp, err = CBDelete(b, s, "root") require.NoError(t, err) @@ -2532,7 +2491,7 @@ func TestBackend_Root_Idempotency(t *testing.T) { } } -func TestBackend_SignIntermediate_AllowedPastCAValidity(t *testing.T) { +func TestBackend_SignIntermediate_AllowedPastCA(t *testing.T) { t.Parallel() b_root, s_root := CreateBackendWithStorage(t) b_int, s_int := CreateBackendWithStorage(t) @@ -2550,7 +2509,6 @@ func TestBackend_SignIntermediate_AllowedPastCAValidity(t *testing.T) { _, err = CBWrite(b_root, s_root, "roles/test", map[string]interface{}{ "allow_bare_domains": true, "allow_subdomains": true, - "allow_any_name": true, }) if err != nil { t.Fatal(err) @@ -2559,7 +2517,6 @@ func TestBackend_SignIntermediate_AllowedPastCAValidity(t *testing.T) { resp, err := CBWrite(b_int, s_int, "intermediate/generate/internal", map[string]interface{}{ "common_name": "myint.com", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b_root.Route("intermediate/generate/internal"), logical.UpdateOperation), resp, true) require.Contains(t, resp.Data, "key_id") intKeyId := resp.Data["key_id"].(keyID) csr := resp.Data["csr"] @@ -2578,7 +2535,9 @@ func TestBackend_SignIntermediate_AllowedPastCAValidity(t *testing.T) { "csr": csr, "ttl": "60h", }) - require.ErrorContains(t, err, "that is beyond the expiration of the CA certificate") + if err == nil { + t.Fatal("expected error") + } _, err = CBWrite(b_root, s_root, "sign-verbatim/test", map[string]interface{}{ "common_name": "myint.com", @@ -2586,7 +2545,9 @@ func TestBackend_SignIntermediate_AllowedPastCAValidity(t *testing.T) { "csr": csr, "ttl": "60h", }) - require.ErrorContains(t, err, "that is beyond the expiration of the CA certificate") + if err == nil { + t.Fatal("expected error") + } resp, err = CBWrite(b_root, s_root, "root/sign-intermediate", map[string]interface{}{ "common_name": "myint.com", @@ -2744,7 +2705,6 @@ func TestBackend_SignSelfIssued(t *testing.T) { }, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("root/sign-self-issued"), logical.UpdateOperation), resp, true) if err != nil { t.Fatal(err) } @@ -3868,15 +3828,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { t.Fatal(err) } - // Set up Metric Configuration, then restart to enable it - _, err = client.Logical().Write("pki/config/auto-tidy", map[string]interface{}{ - "maintain_stored_certificate_counts": true, - "publish_stored_certificate_count_metrics": true, - }) - _, err = client.Logical().Write("/sys/plugins/reload/backend", 
map[string]interface{}{ - "mounts": "pki/", - }) - // Check the metrics initialized in order to calculate backendUUID for /pki // BackendUUID not consistent during tests with UUID from /sys/mounts/pki metricsSuffix := "total_certificates_stored" @@ -3913,14 +3864,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { if err != nil { t.Fatal(err) } - // Set up Metric Configuration, then restart to enable it - _, err = client.Logical().Write("pki2/config/auto-tidy", map[string]interface{}{ - "maintain_stored_certificate_counts": true, - "publish_stored_certificate_count_metrics": true, - }) - _, err = client.Logical().Write("/sys/plugins/reload/backend", map[string]interface{}{ - "mounts": "pki2/", - }) // Create a CSR for the intermediate CA secret, err := client.Logical().Write("pki2/intermediate/generate/internal", nil) @@ -3946,7 +3889,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { if err != nil { t.Fatal(err) } - if secret == nil || len(secret.Data) == 0 || len(secret.Data["certificate"].(string)) == 0 { t.Fatal("expected certificate information from read operation") } @@ -4030,7 +3972,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { expectedData := map[string]interface{}{ "safety_buffer": json.Number("1"), "issuer_safety_buffer": json.Number("31536000"), - "revocation_queue_safety_buffer": json.Number("172800"), "tidy_cert_store": true, "tidy_revoked_certs": true, "tidy_revoked_cert_issuer_associations": false, @@ -4043,7 +3984,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { "error": nil, "time_started": nil, "time_finished": nil, - "last_auto_tidy_finished": nil, "message": nil, "cert_store_deleted_count": json.Number("1"), "revoked_cert_deleted_count": json.Number("1"), @@ -4052,13 +3992,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { "current_revoked_cert_count": json.Number("0"), "revocation_queue_deleted_count": json.Number("0"), "cross_revoked_cert_deleted_count": json.Number("0"), - "internal_backend_uuid": backendUUID, - "tidy_acme": false, - "acme_account_safety_buffer": json.Number("2592000"), - "acme_orders_deleted_count": json.Number("0"), - "acme_account_revoked_count": json.Number("0"), - "acme_account_deleted_count": json.Number("0"), - "total_acme_account_count": json.Number("0"), } // Let's copy the times from the response so that we can use deep.Equal() timeStarted, ok := tidyStatus.Data["time_started"] @@ -4071,7 +4004,6 @@ func TestBackend_RevokePlusTidy_Intermediate(t *testing.T) { t.Fatal("Expected tidy status response to include a value for time_finished") } expectedData["time_finished"] = timeFinished - expectedData["last_auto_tidy_finished"] = tidyStatus.Data["last_auto_tidy_finished"] if diff := deep.Equal(expectedData, tidyStatus.Data); diff != nil { t.Fatal(diff) @@ -4857,7 +4789,6 @@ func TestRootWithExistingKey(t *testing.T) { "key_type": "rsa", "issuer_name": "my-issuer1", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuers/generate/root/internal"), logical.UpdateOperation), resp, true) require.NoError(t, err) require.NotNil(t, resp.Data["certificate"]) myIssuerId1 := resp.Data["issuer_id"] @@ -4973,7 +4904,6 @@ func TestIntermediateWithExistingKey(t *testing.T) { "common_name": "root myvault.com", "key_type": "rsa", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuers/generate/intermediate/internal"), logical.UpdateOperation), resp, true) require.NoError(t, err) // csr1 := resp.Data["csr"] myKeyId1 := resp.Data["key_id"] @@ 
-5141,11 +5071,9 @@ func TestPerIssuerAIA(t *testing.T) { require.Empty(t, rootCert.CRLDistributionPoints) // Set some local URLs on the issuer. - resp, err = CBWrite(b, s, "issuer/default", map[string]interface{}{ + _, err = CBWrite(b, s, "issuer/default", map[string]interface{}{ "issuing_certificates": []string{"https://google.com"}, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuer/default"), logical.UpdateOperation), resp, true) - require.NoError(t, err) _, err = CBWrite(b, s, "roles/testing", map[string]interface{}{ @@ -5267,7 +5195,6 @@ TgM7RZnmEjNdeaa4M52o7VY= resp, err := CBWrite(b, s, "issuers/import/bundle", map[string]interface{}{ "pem_bundle": customBundleWithoutCRLBits, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuers/import/bundle"), logical.UpdateOperation), resp, true) require.NoError(t, err) require.NotNil(t, resp) require.NotEmpty(t, resp.Data) @@ -5309,8 +5236,7 @@ func TestBackend_IfModifiedSinceHeaders(t *testing.T) { }, } cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - RequestResponseCallback: schema.ResponseValidatingCallback(t), + HandlerFunc: vaulthttp.Handler, }) cluster.Start() defer cluster.Cleanup() @@ -5698,14 +5624,6 @@ func TestBackend_InitializeCertificateCounts(t *testing.T) { serials[i] = resp.Data["serial_number"].(string) } - // Turn on certificate counting: - CBWrite(b, s, "config/auto-tidy", map[string]interface{}{ - "maintain_stored_certificate_counts": true, - "publish_stored_certificate_count_metrics": false, - }) - // Assert initialize from clean is correct: - b.initializeStoredCertificateCounts(ctx) - // Revoke certificates A + B revocations := serials[0:2] for _, key := range revocations { @@ -5717,16 +5635,18 @@ func TestBackend_InitializeCertificateCounts(t *testing.T) { } } - if b.certCount.Load() != 6 { - t.Fatalf("Failed to count six certificates root,A,B,C,D,E, instead counted %d certs", b.certCount.Load()) + // Assert initialize from clean is correct: + b.initializeStoredCertificateCounts(ctx) + if atomic.LoadUint32(b.certCount) != 6 { + t.Fatalf("Failed to count six certificates root,A,B,C,D,E, instead counted %d certs", atomic.LoadUint32(b.certCount)) } - if b.revokedCertCount.Load() != 2 { - t.Fatalf("Failed to count two revoked certificates A+B, instead counted %d certs", b.revokedCertCount.Load()) + if atomic.LoadUint32(b.revokedCertCount) != 2 { + t.Fatalf("Failed to count two revoked certificates A+B, instead counted %d certs", atomic.LoadUint32(b.revokedCertCount)) } // Simulates listing while initialize in progress, by "restarting it" - b.certCount.Store(0) - b.revokedCertCount.Store(0) + atomic.StoreUint32(b.certCount, 0) + atomic.StoreUint32(b.revokedCertCount, 0) b.certsCounted.Store(false) // Revoke certificates C, D @@ -5755,12 +5675,12 @@ func TestBackend_InitializeCertificateCounts(t *testing.T) { b.initializeStoredCertificateCounts(ctx) // Test certificate count - if b.certCount.Load() != 8 { - t.Fatalf("Failed to initialize count of certificates root, A,B,C,D,E,F,G counted %d certs", b.certCount.Load()) + if atomic.LoadUint32(b.certCount) != 8 { + t.Fatalf("Failed to initialize count of certificates root, A,B,C,D,E,F,G counted %d certs", *(b.certCount)) } - if b.revokedCertCount.Load() != 4 { - t.Fatalf("Failed to count revoked certificates A,B,C,D counted %d certs", b.revokedCertCount.Load()) + if atomic.LoadUint32(b.revokedCertCount) != 4 { + t.Fatalf("Failed to count revoked certificates A,B,C,D counted 
%d certs", *(b.revokedCertCount)) } return @@ -6030,7 +5950,6 @@ func TestPKI_ListRevokedCerts(t *testing.T) { // Test empty cluster resp, err := CBList(b, s, "certs/revoked") - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("certs/revoked"), logical.ListOperation), resp, true) requireSuccessNonNilResponse(t, resp, err, "failed listing empty cluster") require.Empty(t, resp.Data, "response map contained data that we did not expect") @@ -6105,16 +6024,11 @@ func TestPKI_TemplatedAIAs(t *testing.T) { b, s := CreateBackendWithStorage(t) // Setting templated AIAs should succeed. - resp, err := CBWrite(b, s, "config/cluster", map[string]interface{}{ + _, err := CBWrite(b, s, "config/cluster", map[string]interface{}{ "path": "http://localhost:8200/v1/pki", "aia_path": "http://localhost:8200/cdn/pki", }) require.NoError(t, err) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/cluster"), logical.UpdateOperation), resp, true) - - resp, err = CBRead(b, s, "config/cluster") - require.NoError(t, err) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/cluster"), logical.ReadOperation), resp, true) aiaData := map[string]interface{}{ "crl_distribution_points": "{{cluster_path}}/issuer/{{issuer_id}}/crl/der", @@ -6143,7 +6057,7 @@ func TestPKI_TemplatedAIAs(t *testing.T) { "enable_templating": false, }) require.NoError(t, err) - resp, err = CBWrite(b, s, "root/generate/internal", rootData) + resp, err := CBWrite(b, s, "root/generate/internal", rootData) requireSuccessNonNilResponse(t, resp, err) issuerId := string(resp.Data["issuer_id"].(issuerID)) @@ -6455,7 +6369,6 @@ func TestUserIDsInLeafCerts(t *testing.T) { resp, err = CBWrite(b, s, "sign/testing", map[string]interface{}{ "csr": csrPem, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("sign/testing"), logical.UpdateOperation), resp, true) requireSuccessNonNilResponse(t, resp, err, "failed issuing leaf cert") requireSubjectUserIDAttr(t, resp.Data["certificate"].(string), "humanoid") @@ -6744,7 +6657,7 @@ func TestProperAuthing(t *testing.T) { t.Fatal(err) } serial := resp.Data["serial_number"].(string) - eabKid := "13b80844-e60d-42d2-b7e9-152a8e834b90" + paths := map[string]pathAuthChecker{ "ca_chain": shouldBeUnauthedReadList, "cert/ca_chain": shouldBeUnauthedReadList, @@ -6769,7 +6682,6 @@ func TestProperAuthing(t *testing.T) { "certs/revoked": shouldBeAuthed, "certs/revocation-queue": shouldBeAuthed, "certs/unified-revoked": shouldBeAuthed, - "config/acme": shouldBeAuthed, "config/auto-tidy": shouldBeAuthed, "config/ca": shouldBeAuthed, "config/cluster": shouldBeAuthed, @@ -6862,29 +6774,7 @@ func TestProperAuthing(t *testing.T) { "unified-crl/delta/pem": shouldBeUnauthedReadList, "unified-ocsp": shouldBeUnauthedWriteOnly, "unified-ocsp/dGVzdAo=": shouldBeUnauthedReadList, - "eab": shouldBeAuthed, - "eab/" + eabKid: shouldBeAuthed, - } - - // Add ACME based paths to the test suite - for _, acmePrefix := range []string{"", "issuer/default/", "roles/test/", "issuer/default/roles/test/"} { - paths[acmePrefix+"acme/directory"] = shouldBeUnauthedReadList - paths[acmePrefix+"acme/new-nonce"] = shouldBeUnauthedReadList - paths[acmePrefix+"acme/new-account"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/revoke-cert"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/new-order"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/orders"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/account/hrKmDYTvicHoHGVN2-3uzZV_BPGdE0W_dNaqYTtYqeo="] = 
shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/authorization/29da8c38-7a09-465e-b9a6-3d76802b1afd"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/challenge/29da8c38-7a09-465e-b9a6-3d76802b1afd/http-01"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/order/13b80844-e60d-42d2-b7e9-152a8e834b90"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/order/13b80844-e60d-42d2-b7e9-152a8e834b90/finalize"] = shouldBeUnauthedWriteOnly - paths[acmePrefix+"acme/order/13b80844-e60d-42d2-b7e9-152a8e834b90/cert"] = shouldBeUnauthedWriteOnly - - // Make sure this new-eab path is auth'd - paths[acmePrefix+"acme/new-eab"] = shouldBeAuthed } - for path, checkerType := range paths { checker := pathAuthChckerMap[checkerType] checker(t, client, "pki/"+path, token) @@ -6928,21 +6818,6 @@ func TestProperAuthing(t *testing.T) { if strings.Contains(raw_path, "{serial}") { raw_path = strings.ReplaceAll(raw_path, "{serial}", serial) } - if strings.Contains(raw_path, "acme/account/") && strings.Contains(raw_path, "{kid}") { - raw_path = strings.ReplaceAll(raw_path, "{kid}", "hrKmDYTvicHoHGVN2-3uzZV_BPGdE0W_dNaqYTtYqeo=") - } - if strings.Contains(raw_path, "acme/") && strings.Contains(raw_path, "{auth_id}") { - raw_path = strings.ReplaceAll(raw_path, "{auth_id}", "29da8c38-7a09-465e-b9a6-3d76802b1afd") - } - if strings.Contains(raw_path, "acme/") && strings.Contains(raw_path, "{challenge_type}") { - raw_path = strings.ReplaceAll(raw_path, "{challenge_type}", "http-01") - } - if strings.Contains(raw_path, "acme/") && strings.Contains(raw_path, "{order_id}") { - raw_path = strings.ReplaceAll(raw_path, "{order_id}", "13b80844-e60d-42d2-b7e9-152a8e834b90") - } - if strings.Contains(raw_path, "eab") && strings.Contains(raw_path, "{key_id}") { - raw_path = strings.ReplaceAll(raw_path, "{key_id}", eabKid) - } handler, present := paths[raw_path] if !present { diff --git a/builtin/logical/pki/ca_test.go b/builtin/logical/pki/ca_test.go index 7dbffef24774a2..9dc418c86ba70e 100644 --- a/builtin/logical/pki/ca_test.go +++ b/builtin/logical/pki/ca_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/ca_util.go b/builtin/logical/pki/ca_util.go index 85dc243e58bd48..c7dd02299a11f0 100644 --- a/builtin/logical/pki/ca_util.go +++ b/builtin/logical/pki/ca_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -264,61 +261,3 @@ func existingKeyGeneratorFromBytes(key *keyEntry) certutil.KeyGenerator { return nil } } - -func buildSignVerbatimRoleWithNoData(role *roleEntry) *roleEntry { - data := &framework.FieldData{ - Raw: map[string]interface{}{}, - Schema: addSignVerbatimRoleFields(map[string]*framework.FieldSchema{}), - } - return buildSignVerbatimRole(data, role) -} - -func buildSignVerbatimRole(data *framework.FieldData, role *roleEntry) *roleEntry { - entry := &roleEntry{ - AllowLocalhost: true, - AllowAnyName: true, - AllowIPSANs: true, - AllowWildcardCertificates: new(bool), - EnforceHostnames: false, - KeyType: "any", - UseCSRCommonName: true, - UseCSRSANs: true, - AllowedOtherSANs: []string{"*"}, - AllowedSerialNumbers: []string{"*"}, - AllowedURISANs: []string{"*"}, - AllowedUserIDs: []string{"*"}, - CNValidations: []string{"disabled"}, - GenerateLease: new(bool), - // If adding new fields to be read, update the field list within addSignVerbatimRoleFields - KeyUsage: data.Get("key_usage").([]string), - ExtKeyUsage: data.Get("ext_key_usage").([]string), - ExtKeyUsageOIDs: data.Get("ext_key_usage_oids").([]string), - SignatureBits: data.Get("signature_bits").(int), - UsePSS: data.Get("use_pss").(bool), - } - *entry.AllowWildcardCertificates = true - *entry.GenerateLease = false - - if role != nil { - if role.TTL > 0 { - entry.TTL = role.TTL - } - if role.MaxTTL > 0 { - entry.MaxTTL = role.MaxTTL - } - if role.GenerateLease != nil { - *entry.GenerateLease = *role.GenerateLease - } - if role.NotBeforeDuration > 0 { - entry.NotBeforeDuration = role.NotBeforeDuration - } - entry.NoStore = role.NoStore - entry.Issuer = role.Issuer - } - - if len(entry.Issuer) == 0 { - entry.Issuer = defaultRef - } - - return entry -} diff --git a/builtin/logical/pki/cert_util.go b/builtin/logical/pki/cert_util.go index 1795fcab8aac9a..45f315c946ba3e 100644 --- a/builtin/logical/pki/cert_util.go +++ b/builtin/logical/pki/cert_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -88,11 +85,6 @@ func getFormat(data *framework.FieldData) string { // loading using the legacyBundleShimID and should be used with care. This should be called only once // within the request path otherwise you run the risk of a race condition with the issuer migration on perf-secondaries. func (sc *storageContext) fetchCAInfo(issuerRef string, usage issuerUsage) (*certutil.CAInfoBundle, error) { - bundle, _, err := sc.fetchCAInfoWithIssuer(issuerRef, usage) - return bundle, err -} - -func (sc *storageContext) fetchCAInfoWithIssuer(issuerRef string, usage issuerUsage) (*certutil.CAInfoBundle, issuerID, error) { var issuerId issuerID if sc.Backend.useLegacyBundleCaStorage() { @@ -104,16 +96,11 @@ func (sc *storageContext) fetchCAInfoWithIssuer(issuerRef string, usage issuerUs issuerId, err = sc.resolveIssuerReference(issuerRef) if err != nil { // Usually a bad label from the user or mis-configured default. - return nil, IssuerRefNotFound, errutil.UserError{Err: err.Error()} + return nil, errutil.UserError{Err: err.Error()} } } - bundle, err := sc.fetchCAInfoByIssuerId(issuerId, usage) - if err != nil { - return nil, IssuerRefNotFound, err - } - - return bundle, issuerId, nil + return sc.fetchCAInfoByIssuerId(issuerId, usage) } // fetchCAInfoByIssuerId will fetch the CA info, will return an error if no ca info exists for the given issuerId. 
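// Sketch of the caller-side pattern (hypothetical handler code, using only helpers that appear
// elsewhere in this patch) that fetchCertBySerial below follows: snapshot certsCounted before
// the storage write, then pass it to the increment helper so a concurrently running
// initializeStoredCertificateCounts can record the serial as possibly double-counted.
//
//	certsCounted := b.certsCounted.Load()
//	if err := req.Storage.Put(ctx, certEntry); err != nil {
//		return nil, err
//	}
//	b.incrementTotalCertificatesCount(certsCounted, certEntry.Key)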
@@ -187,17 +174,9 @@ func fetchCertBySerial(sc *storageContext, prefix, serial string) (*logical.Stor legacyPath = "revoked/" + colonSerial path = "revoked/" + hyphenSerial case serial == legacyCRLPath || serial == deltaCRLPath || serial == unifiedCRLPath || serial == unifiedDeltaCRLPath: - warnings, err := sc.Backend.crlBuilder.rebuildIfForced(sc) - if err != nil { + if err = sc.Backend.crlBuilder.rebuildIfForced(sc); err != nil { return nil, err } - if len(warnings) > 0 { - msg := "During rebuild of CRL for cert fetch, got the following warnings:" - for index, warning := range warnings { - msg = fmt.Sprintf("%v\n %d. %v", msg, index+1, warning) - } - sc.Backend.Logger().Warn(msg) - } unified := serial == unifiedCRLPath || serial == unifiedDeltaCRLPath path, err = sc.resolveIssuerCRLPath(defaultRef, unified) @@ -255,9 +234,9 @@ func fetchCertBySerial(sc *storageContext, prefix, serial string) (*logical.Stor // If we fail here, we have an extra (copy) of a cert in storage, add to metrics: switch { case strings.HasPrefix(prefix, "revoked/"): - sc.Backend.ifCountEnabledIncrementTotalRevokedCertificatesCount(certsCounted, path) + sc.Backend.incrementTotalRevokedCertificatesCount(certsCounted, path) default: - sc.Backend.ifCountEnabledIncrementTotalCertificatesCount(certsCounted, path) + sc.Backend.incrementTotalCertificatesCount(certsCounted, path) } return nil, errutil.InternalError{Err: fmt.Sprintf("error deleting certificate with serial %s from old location", serial)} } @@ -336,71 +315,6 @@ func validateCommonName(b *backend, data *inputBundle, name string) string { return "" } -func isWildcardDomain(name string) bool { - // Per RFC 6125 Section 6.4.3, and explicitly contradicting the earlier - // RFC 2818 which no modern client will validate against, there are two - // main types of wildcards, each with a single wildcard specifier (`*`, - // functionally different from the `*` used as a glob from the - // AllowGlobDomains parsing path) in the left-most label: - // - // 1. Entire label is a single wildcard character (most common and - // well-supported), - // 2. Part of the label contains a single wildcard character (e.g. per - // RFC 6125: baz*.example.net, *baz.example.net, or b*z.example.net). - // - // We permit issuance of both but not the older RFC 2818 style under - // the new AllowWildcardCertificates option. However, anything with a - // glob character is technically a wildcard, though not a valid one. - - return strings.Contains(name, "*") -} - -func validateWildcardDomain(name string) (string, string, error) { - // See note in isWildcardDomain(...) about the definition of a wildcard - // domain. - var wildcardLabel string - var reducedName string - - if strings.Count(name, "*") > 1 { - // As mentioned above, only one wildcard character is permitted - // under RFC 6125 semantics. - return wildcardLabel, reducedName, fmt.Errorf("expected only one wildcard identifier in the given domain name") - } - - // Split the Common Name into two parts: a left-most label and the - // remaining segments (if present). - splitLabels := strings.SplitN(name, ".", 2) - if len(splitLabels) != 2 { - // We've been given a single-part domain name that consists - // entirely of a wildcard. This is a little tricky to handle, - // but EnforceHostnames validates both the wildcard-containing - // label and the reduced name, but _only_ the latter if it is - // non-empty. This allows us to still validate the only label - // component matches hostname expectations still. 
- wildcardLabel = splitLabels[0] - reducedName = "" - } else { - // We have a (at least) two label domain name. But before we can - // update our names, we need to validate the wildcard ended up - // in the segment we expected it to. While this is (kinda) - // validated under EnforceHostnames's leftWildLabelRegex, we - // still need to validate it in the non-enforced mode. - // - // By validated assumption above, we know there's strictly one - // wildcard in this domain so we only need to check the wildcard - // label or the reduced name (as one is equivalent to the other). - // Because we later assume reducedName _lacks_ wildcard segments, - // we validate that. - wildcardLabel = splitLabels[0] - reducedName = splitLabels[1] - if strings.Contains(reducedName, "*") { - return wildcardLabel, reducedName, fmt.Errorf("expected wildcard to only be present in left-most domain label") - } - } - - return wildcardLabel, reducedName, nil -} - // Given a set of requested names for a certificate, verifies that all of them // match the various toggles set in the role for controlling issuance. // If one does not pass, it is returned in the string argument. @@ -435,7 +349,21 @@ func validateNames(b *backend, data *inputBundle, names []string) string { isEmail = true } - if isWildcardDomain(reducedName) { + // Per RFC 6125 Section 6.4.3, and explicitly contradicting the earlier + // RFC 2818 which no modern client will validate against, there are two + // main types of wildcards, each with a single wildcard specifier (`*`, + // functionally different from the `*` used as a glob from the + // AllowGlobDomains parsing path) in the left-most label: + // + // 1. Entire label is a single wildcard character (most common and + // well-supported), + // 2. Part of the label contains a single wildcard character (e.g. per + /// RFC 6125: baz*.example.net, *baz.example.net, or b*z.example.net). + // + // We permit issuance of both but not the older RFC 2818 style under + // the new AllowWildcardCertificates option. However, anything with a + // glob character is technically a wildcard. + if strings.Contains(reducedName, "*") { // Regardless of later rejections below, this common name contains // a wildcard character and is thus technically a wildcard name. isWildcard = true @@ -450,12 +378,42 @@ func validateNames(b *backend, data *inputBundle, names []string) string { return name } - // Check that this domain is well-formatted per RFC 6125. - var err error - wildcardLabel, reducedName, err = validateWildcardDomain(reducedName) - if err != nil { + if strings.Count(reducedName, "*") > 1 { + // As mentioned above, only one wildcard character is permitted + // under RFC 6125 semantics. return name } + + // Split the Common Name into two parts: a left-most label and the + // remaining segments (if present). + splitLabels := strings.SplitN(reducedName, ".", 2) + if len(splitLabels) != 2 { + // We've been given a single-part domain name that consists + // entirely of a wildcard. This is a little tricky to handle, + // but EnforceHostnames validates both the wildcard-containing + // label and the reduced name, but _only_ the latter if it is + // non-empty. This allows us to still validate the only label + // component matches hostname expectations still. + wildcardLabel = splitLabels[0] + reducedName = "" + } else { + // We have a (at least) two label domain name. But before we can + // update our names, we need to validate the wildcard ended up + // in the segment we expected it to. 
While this is (kinda) + // validated under EnforceHostnames's leftWildLabelRegex, we + // still need to validate it in the non-enforced mode. + // + // By validated assumption above, we know there's strictly one + // wildcard in this domain so we only need to check the wildcard + // label or the reduced name (as one is equivalent to the other). + // Because we later assume reducedName _lacks_ wildcard segments, + // we validate that. + wildcardLabel = splitLabels[0] + reducedName = splitLabels[1] + if strings.Contains(reducedName, "*") { + return name + } + } } // Email addresses using wildcard domain names do not make sense @@ -1002,12 +960,6 @@ func signCert(b *backend, if isCA { creation.Params.PermittedDNSDomains = data.apiData.Get("permitted_dns_domains").([]string) - } else { - for _, ext := range csr.Extensions { - if ext.Id.Equal(certutil.ExtensionBasicConstraintsOID) { - warnings = append(warnings, "specified CSR contained a Basic Constraints extension that was ignored during issuance") - } - } } parsedBundle, err := certutil.SignCertificate(creation) @@ -1379,11 +1331,71 @@ func generateCreationBundle(b *backend, data *inputBundle, caSign *certutil.CAIn } // Get the TTL and verify it against the max allowed - notAfter, ttlWarnings, err := getCertificateNotAfter(b, data, caSign) - if err != nil { - return nil, warnings, err + var ttl time.Duration + var maxTTL time.Duration + var notAfter time.Time + var err error + { + ttl = time.Duration(data.apiData.Get("ttl").(int)) * time.Second + notAfterAlt := data.role.NotAfter + if notAfterAlt == "" { + notAfterAltRaw, ok := data.apiData.GetOk("not_after") + if ok { + notAfterAlt = notAfterAltRaw.(string) + } + + } + if ttl > 0 && notAfterAlt != "" { + return nil, nil, errutil.UserError{ + Err: "Either ttl or not_after should be provided. Both should not be provided in the same request.", + } + } + + if ttl == 0 && data.role.TTL > 0 { + ttl = data.role.TTL + } + + if data.role.MaxTTL > 0 { + maxTTL = data.role.MaxTTL + } + + if ttl == 0 { + ttl = b.System().DefaultLeaseTTL() + } + if maxTTL == 0 { + maxTTL = b.System().MaxLeaseTTL() + } + if ttl > maxTTL { + warnings = append(warnings, fmt.Sprintf("TTL %q is longer than permitted maxTTL %q, so maxTTL is being used", ttl, maxTTL)) + ttl = maxTTL + } + + if notAfterAlt != "" { + notAfter, err = time.Parse(time.RFC3339, notAfterAlt) + if err != nil { + return nil, nil, errutil.UserError{Err: err.Error()} + } + } else { + notAfter = time.Now().Add(ttl) + } + if caSign != nil && notAfter.After(caSign.Certificate.NotAfter) { + // If it's not self-signed, verify that the issued certificate + // won't be valid past the lifetime of the CA certificate, and + // act accordingly. This is dependent based on the issuer's + // LeafNotAfterBehavior argument. + switch caSign.LeafNotAfterBehavior { + case certutil.PermitNotAfterBehavior: + // Explicitly do nothing. + case certutil.TruncateNotAfterBehavior: + notAfter = caSign.Certificate.NotAfter + case certutil.ErrNotAfterBehavior: + fallthrough + default: + return nil, nil, errutil.UserError{Err: fmt.Sprintf( + "cannot satisfy request, as TTL would result in notAfter of %s that is beyond the expiration of the CA certificate at %s", notAfter.UTC().Format(time.RFC3339Nano), caSign.Certificate.NotAfter.UTC().Format(time.RFC3339Nano))} + } + } } - warnings = append(warnings, ttlWarnings...) // Parse SKID from the request for cross-signing. 
var skid []byte @@ -1499,73 +1511,6 @@ func generateCreationBundle(b *backend, data *inputBundle, caSign *certutil.CAIn return creation, warnings, nil } -// getCertificateNotAfter compute a certificate's NotAfter date based on the mount ttl, role, signing bundle and input -// api data being sent. Returns a NotAfter time, a set of warnings or an error. -func getCertificateNotAfter(b *backend, data *inputBundle, caSign *certutil.CAInfoBundle) (time.Time, []string, error) { - var warnings []string - var maxTTL time.Duration - var notAfter time.Time - var err error - - ttl := time.Duration(data.apiData.Get("ttl").(int)) * time.Second - notAfterAlt := data.role.NotAfter - if notAfterAlt == "" { - notAfterAltRaw, ok := data.apiData.GetOk("not_after") - if ok { - notAfterAlt = notAfterAltRaw.(string) - } - } - if ttl > 0 && notAfterAlt != "" { - return time.Time{}, warnings, errutil.UserError{Err: "Either ttl or not_after should be provided. Both should not be provided in the same request."} - } - - if ttl == 0 && data.role.TTL > 0 { - ttl = data.role.TTL - } - - if data.role.MaxTTL > 0 { - maxTTL = data.role.MaxTTL - } - - if ttl == 0 { - ttl = b.System().DefaultLeaseTTL() - } - if maxTTL == 0 { - maxTTL = b.System().MaxLeaseTTL() - } - if ttl > maxTTL { - warnings = append(warnings, fmt.Sprintf("TTL %q is longer than permitted maxTTL %q, so maxTTL is being used", ttl, maxTTL)) - ttl = maxTTL - } - - if notAfterAlt != "" { - notAfter, err = time.Parse(time.RFC3339, notAfterAlt) - if err != nil { - return notAfter, warnings, errutil.UserError{Err: err.Error()} - } - } else { - notAfter = time.Now().Add(ttl) - } - if caSign != nil && notAfter.After(caSign.Certificate.NotAfter) { - // If it's not self-signed, verify that the issued certificate - // won't be valid past the lifetime of the CA certificate, and - // act accordingly. This is dependent based on the issuer's - // LeafNotAfterBehavior argument. - switch caSign.LeafNotAfterBehavior { - case certutil.PermitNotAfterBehavior: - // Explicitly do nothing. - case certutil.TruncateNotAfterBehavior: - notAfter = caSign.Certificate.NotAfter - case certutil.ErrNotAfterBehavior: - fallthrough - default: - return time.Time{}, warnings, errutil.UserError{Err: fmt.Sprintf( - "cannot satisfy request, as TTL would result in notAfter of %s that is beyond the expiration of the CA certificate at %s", notAfter.UTC().Format(time.RFC3339Nano), caSign.Certificate.NotAfter.UTC().Format(time.RFC3339Nano))} - } - } - return notAfter, warnings, nil -} - func convertRespToPKCS8(resp *logical.Response) error { privRaw, ok := resp.Data["private_key"] if !ok { diff --git a/builtin/logical/pki/cert_util_test.go b/builtin/logical/pki/cert_util_test.go index 7fb811cb8fcfe0..de9c70ee05973a 100644 --- a/builtin/logical/pki/cert_util_test.go +++ b/builtin/logical/pki/cert_util_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/chain_test.go b/builtin/logical/pki/chain_test.go index e76df359e9ed1b..e6057f261ba03d 100644 --- a/builtin/logical/pki/chain_test.go +++ b/builtin/logical/pki/chain_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/chain_util.go b/builtin/logical/pki/chain_util.go index e884f075588ee9..6b7a6a5c7bebda 100644 --- a/builtin/logical/pki/chain_util.go +++ b/builtin/logical/pki/chain_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/cmd/pki/main.go b/builtin/logical/pki/cmd/pki/main.go index 7c804be23713da..ffcb4521c89588 100644 --- a/builtin/logical/pki/cmd/pki/main.go +++ b/builtin/logical/pki/cmd/pki/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: pki.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/pki/config_util.go b/builtin/logical/pki/config_util.go index 80814550c75315..1c70c47c5f211c 100644 --- a/builtin/logical/pki/config_util.go +++ b/builtin/logical/pki/config_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/crl_test.go b/builtin/logical/pki/crl_test.go index aaa67ba77d805e..a5f83b2f88a120 100644 --- a/builtin/logical/pki/crl_test.go +++ b/builtin/logical/pki/crl_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -12,8 +9,6 @@ import ( "testing" "time" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" - "github.com/hashicorp/vault/api" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/logical" @@ -95,11 +90,8 @@ func TestBackend_CRLConfig(t *testing.T) { "auto_rebuild_grace_period": tc.autoRebuildGracePeriod, }) requireSuccessNonNilResponse(t, resp, err) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/crl"), logical.UpdateOperation), resp, true) resp, err = CBRead(b, s, "config/crl") - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/crl"), logical.ReadOperation), resp, true) - requireSuccessNonNilResponse(t, resp, err) requireFieldsSetInResp(t, resp, "disable", "expiry", "ocsp_disable", "auto_rebuild", "auto_rebuild_grace_period") @@ -415,18 +407,15 @@ func TestCrlRebuilder(t *testing.T) { cb := newCRLBuilder(true /* can rebuild and write CRLs */) // Force an initial build - warnings, err := cb.rebuild(sc, true) + err = cb.rebuild(sc, true) require.NoError(t, err, "Failed to rebuild CRL") - require.Empty(t, warnings, "unexpectedly got warnings rebuilding CRL") resp := requestCrlFromBackend(t, s, b) crl1 := parseCrlPemBytes(t, resp.Data["http_raw_body"].([]byte)) // We shouldn't rebuild within this call. 
- warnings, err = cb.rebuildIfForced(sc) + err = cb.rebuildIfForced(sc) require.NoError(t, err, "Failed to rebuild if forced CRL") - require.Empty(t, warnings, "unexpectedly got warnings rebuilding CRL") - resp = requestCrlFromBackend(t, s, b) crl2 := parseCrlPemBytes(t, resp.Data["http_raw_body"].([]byte)) require.Equal(t, crl1.ThisUpdate, crl2.ThisUpdate, "According to the update field, we rebuilt the CRL") @@ -442,12 +431,9 @@ func TestCrlRebuilder(t *testing.T) { // This should rebuild the CRL cb.requestRebuildIfActiveNode(b) - warnings, err = cb.rebuildIfForced(sc) + err = cb.rebuildIfForced(sc) require.NoError(t, err, "Failed to rebuild if forced CRL") - require.Empty(t, warnings, "unexpectedly got warnings rebuilding CRL") resp = requestCrlFromBackend(t, s, b) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("crl/pem"), logical.ReadOperation), resp, true) - crl3 := parseCrlPemBytes(t, resp.Data["http_raw_body"].([]byte)) require.True(t, crl1.ThisUpdate.Before(crl3.ThisUpdate), "initial crl time: %#v not before next crl rebuild time: %#v", crl1.ThisUpdate, crl3.ThisUpdate) @@ -607,11 +593,10 @@ func TestPoP(t *testing.T) { require.NotNil(t, resp) require.NotEmpty(t, resp.Data["certificate"]) - resp, err = CBWrite(b, s, "revoke-with-key", map[string]interface{}{ + _, err = CBWrite(b, s, "revoke-with-key", map[string]interface{}{ "certificate": resp.Data["certificate"], "private_key": resp.Data["private_key"], }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("revoke-with-key"), logical.UpdateOperation), resp, true) require.NoError(t, err) // Issue a second leaf, but hold onto it for now. @@ -781,16 +766,12 @@ func TestIssuerRevocation(t *testing.T) { // Revoke it. resp, err = CBWrite(b, s, "issuer/root2/revoke", map[string]interface{}{}) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuer/root2/revoke"), logical.UpdateOperation), resp, true) - require.NoError(t, err) require.NotNil(t, resp) require.NotZero(t, resp.Data["revocation_time"]) // Regenerate the CRLs - resp, err = CBRead(b, s, "crl/rotate") - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("crl/rotate"), logical.ReadOperation), resp, true) - + _, err = CBRead(b, s, "crl/rotate") require.NoError(t, err) // Ensure the old cert isn't on its own CRL. @@ -815,7 +796,7 @@ func TestIssuerRevocation(t *testing.T) { require.NoError(t, err) // Issue a leaf cert and ensure it fails (because the issuer is revoked). 
- resp, err = CBWrite(b, s, "issuer/root2/issue/local-testing", map[string]interface{}{ + _, err = CBWrite(b, s, "issuer/root2/issue/local-testing", map[string]interface{}{ "common_name": "testing", }) require.Error(t, err) @@ -841,8 +822,6 @@ func TestIssuerRevocation(t *testing.T) { resp, err = CBWrite(b, s, "intermediate/set-signed", map[string]interface{}{ "certificate": intCert, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("intermediate/set-signed"), logical.UpdateOperation), resp, true) - require.NoError(t, err) require.NotNil(t, resp) require.NotEmpty(t, resp.Data["imported_issuers"]) @@ -858,8 +837,6 @@ func TestIssuerRevocation(t *testing.T) { resp, err = CBWrite(b, s, "issuer/int1/issue/local-testing", map[string]interface{}{ "common_name": "testing", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuer/int1/issue/local-testing"), logical.UpdateOperation), resp, true) - require.NoError(t, err) require.NotNil(t, resp) require.NotEmpty(t, resp.Data["certificate"]) @@ -1044,7 +1021,13 @@ func TestAutoRebuild(t *testing.T) { // each revocation. Pull the storage from the cluster (via the sys/raw // endpoint which requires the mount UUID) and verify the revInfo contains // a matching issuer. - pkiMount := findStorageMountUuid(t, client, "pki") + resp, err = client.Logical().Read("sys/mounts/pki") + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.NotEmpty(t, resp.Data["uuid"]) + pkiMount := resp.Data["uuid"].(string) + require.NotEmpty(t, pkiMount) revEntryPath := "logical/" + pkiMount + "/" + revokedPath + normalizeSerial(newLeafSerial) // storage from cluster.Core[0] is a physical storage copy, not a logical @@ -1172,17 +1155,6 @@ func TestAutoRebuild(t *testing.T) { requireSerialNumberInCRL(t, crl, newLeafSerial) } -func findStorageMountUuid(t *testing.T, client *api.Client, mount string) string { - resp, err := client.Logical().Read("sys/mounts/" + mount) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["uuid"]) - pkiMount := resp.Data["uuid"].(string) - require.NotEmpty(t, pkiMount) - return pkiMount -} - func TestTidyIssuerAssociation(t *testing.T) { t.Parallel() @@ -1334,106 +1306,3 @@ func requestCrlFromBackend(t *testing.T, s logical.Storage, b *backend) *logical require.False(t, resp.IsError(), "crl error response: %v", resp) return resp } - -func TestCRLWarningsEmptyKeyUsage(t *testing.T) { - t.Parallel() - - b, s := CreateBackendWithStorage(t) - - // Generated using OpenSSL with a configuration lacking KeyUsage on - // the CA certificate. 
- cert := `-----BEGIN CERTIFICATE----- -MIIDBjCCAe6gAwIBAgIBATANBgkqhkiG9w0BAQsFADATMREwDwYDVQQDDAhyb290 -LW9sZDAeFw0yMDAxMDEwMTAxMDFaFw0yMTAxMDEwMTAxMDFaMBMxETAPBgNVBAMM -CHJvb3Qtb2xkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzqhSZxAL -PwFhCIPL1jFPq6jxp1wFgo6YNSfVI13gfaGIjfErxsQUbosmlEuTeOc50zXXN3kb -SDufy5Yi1OeSkFZRdJ78zdKzsEDIVR1ukUngVsSrt05gdNMJlh8XOPbcrJo78jYG -lRgtkkFSc/wCu+ue6JqkfKrbUY/G9WK0UM8ppHm1Ux67ZGoypyEgaqqxKHBRC4Yl -D+lAs1vP4C6cavqdUMKgAPTKmMBzlbpCuYPLHSzWh9Com3WQSqCbrlo3uH5RT3V9 -5Gjuk3mMUhY1l6fRL7wG3f+4x+DS+ICQNT0o4lnMxpIsiTh0cEHUFgY7G0iHWYPj -CIN8UDhpZIpoCQIDAQABo2UwYzAdBgNVHQ4EFgQUJlHk3PN7pfC22FGxAb0rWgQt -L4cwHwYDVR0jBBgwFoAUJlHk3PN7pfC22FGxAb0rWgQtL4cwDAYDVR0TBAUwAwEB -/zATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQsFAAOCAQEAcaU0FbXb -FfXluBrjKfOzVKz+kvQ1CVv3xe3MBkS6wvqybBjJCFChnqCPxEe57BdSbBXNU5LZ -zCR/OqYas4Csv9+msSn9BI2FSMAmfMDTsp5/6iIQJqlJx9L8a7bjzVMGX6QJm/3x -S/EgGsMETAgewQXeu4jhI6StgJ2V/4Ofe498hYw4LAiBapJmkU/nHezWodNBZJ7h -LcLOzVj0Hu5MZplGBgJFgRqBCVVkqXA0q7tORuhNzYtNdJFpv3pZIhvVFFu3HUPf -wYQPhLye5WNtosz5xKe8X0Q9qp8g6azMTk+5Qe7u1d8MYAA2AIlGuKUvPHRruOmN -NC+gQnS7AK1lCw== ------END CERTIFICATE-----` - privKey := `-----BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDOqFJnEAs/AWEI -g8vWMU+rqPGnXAWCjpg1J9UjXeB9oYiN8SvGxBRuiyaUS5N45znTNdc3eRtIO5/L -liLU55KQVlF0nvzN0rOwQMhVHW6RSeBWxKu3TmB00wmWHxc49tysmjvyNgaVGC2S -QVJz/AK7657omqR8qttRj8b1YrRQzymkebVTHrtkajKnISBqqrEocFELhiUP6UCz -W8/gLpxq+p1QwqAA9MqYwHOVukK5g8sdLNaH0KibdZBKoJuuWje4flFPdX3kaO6T -eYxSFjWXp9EvvAbd/7jH4NL4gJA1PSjiWczGkiyJOHRwQdQWBjsbSIdZg+MIg3xQ -OGlkimgJAgMBAAECggEABKmCdmXDwy+eR0ll41aoc/hzPzHRxADAiU51Pf+DrYHj -6UPcF3db+KR2Adl0ocEhqlSoHs3CIk6KC9c+wOvagBwaaVWe4WvT9vF3M4he8rMm -dv6n2xJPFcOfDz5zUSssjk5KdOvoGRv7BzYnDIvOafvmUVwPwuo92Wizddy8saf4 -Xuea0Cupz1PELPKkbXcAqb+TzbAZrwdPj1Y7vTe/KGE4+aoDqCW/sFB1E0UsMGlt -/yfGwFP48b7kdkqSpcEQW5H8+WL3TfqRcolCD9To4vo2J+1Po0S/8qPNRvkNQDDX -AypHtrXFBOWHpJgXT4rKyH+ZGJchrCRDblt9s/sNQwKBgQD7NytvYET3pWemYiX+ -MB9uc6cPuMFONvlzjA9T6dbOSi/HLaeDoW027aMUZqb7QeaQCoWcUwh13dI2SZq0 -5+l9hei4JkWjoDhbWmPe7zDuQr3UMl0CSk3egz3BSHkjAhRAuUxK0QLKGB23zWxz -k8mUWYZaZRA39C6aqMt/jbJjDwKBgQDSl+eO+DjpwPzrjPSphpF4xYo4XDje9ovK -9q4KTHye7Flc3cMCX3WZBmzdt0zbqu6gWZjJH0XbWX/+SkJBGh77XWD0FeQnU7Vk -ipoeb8zTsCVxD9EytQuXti3cqBgClcCMvLKgLOJIcNYTnygojwg3t+jboQqbtV7p -VpQfAC6jZwKBgQCxJ46x1CnOmg4l/0DbqAQCV/yP0bI//fSbz0Ff459fimF3DHL9 -GHF0MtC2Kk3HEgoNud3PB58Hv43mSrGWsZSuuCgM9LBXWz1i7rNPG05eNyK26W09 -mDihmduK2hjS3zx5CDMM76gP7EHIxEyelLGqtBdS18JAMypKVo5rPPl3cQKBgQCG -ueXLImQOr4dfDntLpSqV0BLAQcekZKhEPZJURmCHr37wGXNzpixurJyjL2w9MFqf -PRKwwJAJZ3Wp8kn2qkZd23x2Szb+LeBjJQS6Kh4o44zgixTz0r1K3qLygptxs+pO -Xz4LmQte+skKHo0rfW3tb3vKXnmR6fOBZgE23//2SwKBgHck44hoE1Ex2gDEfIq1 -04OBoS1cpuc9ge4uHEmv+8uANjzwlsYf8hY1qae513MGixRBOkxcI5xX/fYPQV9F -t3Jfh8QX85JjnGntuXuraYZJMUjpwXr3QHPx0jpvAM3Au5j6qD3biC9Vrwq9Chkg -hbiiPARizZA/Tsna/9ox1qDT ------END PRIVATE KEY-----` - resp, err := CBWrite(b, s, "issuers/import/bundle", map[string]interface{}{ - "pem_bundle": cert + "\n" + privKey, - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotEmpty(t, resp.Warnings) - originalWarnings := resp.Warnings - - resp, err = CBRead(b, s, "crl/rotate") - require.NoError(t, err) - require.NotNil(t, resp) - require.NotEmpty(t, resp.Warnings) - - // All CRL-specific warnings should've already occurred earlier on the - // import's CRL rebuild. - for _, warning := range resp.Warnings { - require.Contains(t, originalWarnings, warning) - } - - // Deleting the issuer and key should remove the warning. 
- _, err = CBDelete(b, s, "root") - require.NoError(t, err) - - resp, err = CBRead(b, s, "crl/rotate") - require.NoError(t, err) - require.NotNil(t, resp) - require.Empty(t, resp.Warnings) - - // Adding back just the cert shouldn't cause CRL rebuild warnings. - resp, err = CBWrite(b, s, "issuers/import/bundle", map[string]interface{}{ - "pem_bundle": cert, - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotNil(t, resp.Data["mapping"]) - require.NotEmpty(t, resp.Data["mapping"]) - require.Equal(t, len(resp.Data["mapping"].(map[string]string)), 1) - for key, value := range resp.Data["mapping"].(map[string]string) { - require.NotEmpty(t, key) - require.Empty(t, value) - } - - resp, err = CBRead(b, s, "crl/rotate") - require.NoError(t, err) - require.NotNil(t, resp) - require.Empty(t, resp.Warnings) -} diff --git a/builtin/logical/pki/crl_util.go b/builtin/logical/pki/crl_util.go index 894e427f101102..d10b6e14f0b079 100644 --- a/builtin/logical/pki/crl_util.go +++ b/builtin/logical/pki/crl_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -301,16 +298,16 @@ func (cb *crlBuilder) flushCRLBuildTimeInvalidation(sc *storageContext) error { // rebuildIfForced is to be called by readers or periodic functions that might need to trigger // a refresh of the CRL before the read occurs. -func (cb *crlBuilder) rebuildIfForced(sc *storageContext) ([]string, error) { +func (cb *crlBuilder) rebuildIfForced(sc *storageContext) error { if cb.forceRebuild.Load() { return cb._doRebuild(sc, true, _enforceForceFlag) } - return nil, nil + return nil } // rebuild is to be called by various write apis that know the CRL is to be updated and can be now. -func (cb *crlBuilder) rebuild(sc *storageContext, forceNew bool) ([]string, error) { +func (cb *crlBuilder) rebuild(sc *storageContext, forceNew bool) error { return cb._doRebuild(sc, forceNew, _ignoreForceFlag) } @@ -329,7 +326,7 @@ func (cb *crlBuilder) requestRebuildIfActiveNode(b *backend) { cb.forceRebuild.Store(true) } -func (cb *crlBuilder) _doRebuild(sc *storageContext, forceNew bool, ignoreForceFlag bool) ([]string, error) { +func (cb *crlBuilder) _doRebuild(sc *storageContext, forceNew bool, ignoreForceFlag bool) error { cb._builder.Lock() defer cb._builder.Unlock() // Re-read the lock in case someone beat us to the punch between the previous load op. @@ -346,7 +343,7 @@ func (cb *crlBuilder) _doRebuild(sc *storageContext, forceNew bool, ignoreForceF return buildCRLs(sc, myForceNew) } - return nil, nil + return nil } func (cb *crlBuilder) _getPresentDeltaWALForClearing(sc *storageContext, path string) ([]string, error) { @@ -415,7 +412,7 @@ func (cb *crlBuilder) clearUnifiedDeltaWAL(sc *storageContext, walSerials []stri return cb._clearDeltaWAL(sc, walSerials, unifiedDeltaWALPrefix) } -func (cb *crlBuilder) rebuildDeltaCRLsIfForced(sc *storageContext, override bool) ([]string, error) { +func (cb *crlBuilder) rebuildDeltaCRLsIfForced(sc *storageContext, override bool) error { // Delta CRLs use the same expiry duration as the complete CRL. Because // we always rebuild the complete CRL and then the delta CRL, we can // be assured that the delta CRL always expires after a complete CRL, @@ -427,18 +424,18 @@ func (cb *crlBuilder) rebuildDeltaCRLsIfForced(sc *storageContext, override bool // within our time window for updating it. 
cfg, err := cb.getConfigWithUpdate(sc) if err != nil { - return nil, err + return err } if !cfg.EnableDelta { // We explicitly do not update the last check time here, as we // want to persist the last rebuild window if it hasn't been set. - return nil, nil + return nil } deltaRebuildDuration, err := time.ParseDuration(cfg.DeltaRebuildInterval) if err != nil { - return nil, err + return err } // Acquire CRL building locks before we get too much further. @@ -454,7 +451,7 @@ func (cb *crlBuilder) rebuildDeltaCRLsIfForced(sc *storageContext, override bool // If we're still before the time of our next rebuild check, we can // safely return here even if we have certs. We'll wait for a bit, // retrigger this check, and then do the rebuild. - return nil, nil + return nil } // Update our check time. If we bail out below (due to storage errors @@ -465,16 +462,16 @@ func (cb *crlBuilder) rebuildDeltaCRLsIfForced(sc *storageContext, override bool rebuildLocal, err := cb._shouldRebuildLocalCRLs(sc, override) if err != nil { - return nil, fmt.Errorf("error determining if local CRLs should be rebuilt: %w", err) + return err } rebuildUnified, err := cb._shouldRebuildUnifiedCRLs(sc, override) if err != nil { - return nil, fmt.Errorf("error determining if unified CRLs should be rebuilt: %w", err) + return err } if !rebuildLocal && !rebuildUnified { - return nil, nil + return nil } // Finally, we must've needed to do the rebuild. Execute! @@ -601,14 +598,14 @@ func (cb *crlBuilder) _shouldRebuildUnifiedCRLs(sc *storageContext, override boo return shouldRebuild, nil } -func (cb *crlBuilder) rebuildDeltaCRLs(sc *storageContext, forceNew bool) ([]string, error) { +func (cb *crlBuilder) rebuildDeltaCRLs(sc *storageContext, forceNew bool) error { cb._builder.Lock() defer cb._builder.Unlock() return cb.rebuildDeltaCRLsHoldingLock(sc, forceNew) } -func (cb *crlBuilder) rebuildDeltaCRLsHoldingLock(sc *storageContext, forceNew bool) ([]string, error) { +func (cb *crlBuilder) rebuildDeltaCRLsHoldingLock(sc *storageContext, forceNew bool) error { return buildAnyCRLs(sc, forceNew, true /* building delta */) } @@ -1022,7 +1019,7 @@ func revokeCert(sc *storageContext, config *crlConfig, cert *x509.Certificate) ( if err != nil { return nil, fmt.Errorf("error saving revoked certificate to new location: %w", err) } - sc.Backend.ifCountEnabledIncrementTotalRevokedCertificatesCount(certsCounted, revEntry.Key) + sc.Backend.incrementTotalRevokedCertificatesCount(certsCounted, revEntry.Key) // From here on out, the certificate has been revoked locally. Any other // persistence issues might still err, but any other failure messages @@ -1064,7 +1061,7 @@ func revokeCert(sc *storageContext, config *crlConfig, cert *x509.Certificate) ( // already rebuilt the full CRL so the Delta WAL will be cleared // afterwards. Writing an entry only to immediately remove it // isn't necessary. 
- warnings, crlErr := sc.Backend.crlBuilder.rebuild(sc, false) + crlErr := sc.Backend.crlBuilder.rebuild(sc, false) if crlErr != nil { switch crlErr.(type) { case errutil.UserError: @@ -1073,9 +1070,6 @@ func revokeCert(sc *storageContext, config *crlConfig, cert *x509.Certificate) ( return nil, fmt.Errorf("error encountered during CRL building: %w", crlErr) } } - for index, warning := range warnings { - resp.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } } else if config.EnableDelta { if err := writeRevocationDeltaWALs(sc, config, resp, failedWritingUnifiedCRL, hyphenSerial, colonSerial); err != nil { return nil, fmt.Errorf("failed to write WAL entries for Delta CRLs: %w", err) @@ -1146,7 +1140,7 @@ func writeSpecificRevocationDeltaWALs(sc *storageContext, hyphenSerial string, c var walInfo deltaWALInfo walEntry, err := logical.StorageEntryJSON(pathPrefix+hyphenSerial, walInfo) if err != nil { - return fmt.Errorf("unable to create delta CRL WAL entry: %w", err) + return fmt.Errorf("unable to create delta CRL WAL entry") } if err = sc.Storage.Put(sc.Context, walEntry); err != nil { @@ -1159,7 +1153,7 @@ func writeSpecificRevocationDeltaWALs(sc *storageContext, hyphenSerial string, c lastRevSerial := lastWALInfo{Serial: colonSerial} lastWALEntry, err := logical.StorageEntryJSON(pathPrefix+deltaWALLastRevokedSerialName, lastRevSerial) if err != nil { - return fmt.Errorf("unable to create last delta CRL WAL entry: %w", err) + return fmt.Errorf("unable to create last delta CRL WAL entry") } if err = sc.Storage.Put(sc.Context, lastWALEntry); err != nil { return fmt.Errorf("error saving last delta CRL WAL entry: %w", err) @@ -1168,11 +1162,11 @@ func writeSpecificRevocationDeltaWALs(sc *storageContext, hyphenSerial string, c return nil } -func buildCRLs(sc *storageContext, forceNew bool) ([]string, error) { +func buildCRLs(sc *storageContext, forceNew bool) error { return buildAnyCRLs(sc, forceNew, false) } -func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, error) { +func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) error { // In order to build all CRLs, we need knowledge of all issuers. Any two // issuers with the same keys _and_ subject should have the same CRL since // they're functionally equivalent. @@ -1207,7 +1201,7 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er // buildCRL. globalCRLConfig, err := sc.Backend.crlBuilder.getConfigWithUpdate(sc) if err != nil { - return nil, fmt.Errorf("error building CRL: while updating config: %w", err) + return fmt.Errorf("error building CRL: while updating config: %w", err) } if globalCRLConfig.Disable && !forceNew { @@ -1219,13 +1213,13 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er // So, since tidy can now associate issuers on revocation entries, we // can skip the rest of this function and exit early without updating // anything. 
- return nil, nil + return nil } if !sc.Backend.useLegacyBundleCaStorage() { issuers, err = sc.listIssuers() if err != nil { - return nil, fmt.Errorf("error building CRL: while listing issuers: %w", err) + return fmt.Errorf("error building CRL: while listing issuers: %w", err) } } else { // Here, we hard-code the legacy issuer entry instead of using the @@ -1239,13 +1233,13 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er // Users should upgrade symmetrically, rather than attempting // backward compatibility for new features across disparate versions. if isDelta { - return []string{"refusing to rebuild delta CRL with legacy bundle; finish migrating to newer issuer storage layout"}, nil + return nil } } issuersConfig, err := sc.getIssuersConfig() if err != nil { - return nil, fmt.Errorf("error building CRLs: while getting the default config: %w", err) + return fmt.Errorf("error building CRLs: while getting the default config: %w", err) } // We map issuerID->entry for fast lookup and also issuerID->Cert for @@ -1262,7 +1256,7 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er // legacy path is automatically ignored. thisEntry, _, err := sc.fetchCertBundleByIssuerId(issuer, false) if err != nil { - return nil, fmt.Errorf("error building CRLs: unable to fetch specified issuer (%v): %w", issuer, err) + return fmt.Errorf("error building CRLs: unable to fetch specified issuer (%v): %w", issuer, err) } if len(thisEntry.KeyID) == 0 { @@ -1287,7 +1281,7 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er thisCert, err := thisEntry.GetCertificate() if err != nil { - return nil, fmt.Errorf("error building CRLs: unable to parse issuer (%v)'s certificate: %w", issuer, err) + return fmt.Errorf("error building CRLs: unable to parse issuer (%v)'s certificate: %w", issuer, err) } issuerIDCertMap[issuer] = thisCert @@ -1302,27 +1296,19 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er // Now we do two calls: building the cluster-local CRL, and potentially // building the global CRL if we're on the active node of the performance // primary. - currLocalDeltaSerials, localWarnings, err := buildAnyLocalCRLs(sc, issuersConfig, globalCRLConfig, + currLocalDeltaSerials, err := buildAnyLocalCRLs(sc, issuersConfig, globalCRLConfig, issuers, issuerIDEntryMap, issuerIDCertMap, keySubjectIssuersMap, wasLegacy, forceNew, isDelta) if err != nil { - return nil, err + return err } - currUnifiedDeltaSerials, unifiedWarnings, err := buildAnyUnifiedCRLs(sc, issuersConfig, globalCRLConfig, + currUnifiedDeltaSerials, err := buildAnyUnifiedCRLs(sc, issuersConfig, globalCRLConfig, issuers, issuerIDEntryMap, issuerIDCertMap, keySubjectIssuersMap, wasLegacy, forceNew, isDelta) if err != nil { - return nil, err - } - - var warnings []string - for _, warning := range localWarnings { - warnings = append(warnings, fmt.Sprintf("warning from local CRL rebuild: %v", warning)) - } - for _, warning := range unifiedWarnings { - warnings = append(warnings, fmt.Sprintf("warning from unified CRL rebuild: %v", warning)) + return err } // Finally, we decide if we need to rebuild the Delta CRLs again, for both @@ -1331,21 +1317,17 @@ func buildAnyCRLs(sc *storageContext, forceNew bool, isDelta bool) ([]string, er // After we've confirmed the primary CRLs have built OK, go ahead and // clear the delta CRL WAL and rebuild it. 
if err := sc.Backend.crlBuilder.clearLocalDeltaWAL(sc, currLocalDeltaSerials); err != nil { - return nil, fmt.Errorf("error building CRLs: unable to clear Delta WAL: %w", err) + return fmt.Errorf("error building CRLs: unable to clear Delta WAL: %w", err) } if err := sc.Backend.crlBuilder.clearUnifiedDeltaWAL(sc, currUnifiedDeltaSerials); err != nil { - return nil, fmt.Errorf("error building CRLs: unable to clear Delta WAL: %w", err) + return fmt.Errorf("error building CRLs: unable to clear Delta WAL: %w", err) } - deltaWarnings, err := sc.Backend.crlBuilder.rebuildDeltaCRLsHoldingLock(sc, forceNew) - if err != nil { - return nil, fmt.Errorf("error building CRLs: unable to rebuild empty Delta WAL: %w", err) - } - for _, warning := range deltaWarnings { - warnings = append(warnings, fmt.Sprintf("warning from delta CRL rebuild: %v", warning)) + if err := sc.Backend.crlBuilder.rebuildDeltaCRLsHoldingLock(sc, forceNew); err != nil { + return fmt.Errorf("error building CRLs: unable to rebuild empty Delta WAL: %w", err) } } - return warnings, nil + return nil } func getLastWALSerial(sc *storageContext, path string) (string, error) { @@ -1378,9 +1360,8 @@ func buildAnyLocalCRLs( wasLegacy bool, forceNew bool, isDelta bool, -) ([]string, []string, error) { +) ([]string, error) { var err error - var warnings []string // Before we load cert entries, we want to store the last seen delta WAL // serial number. The subsequent List will have at LEAST that certificate @@ -1391,7 +1372,7 @@ func buildAnyLocalCRLs( if isDelta { lastDeltaSerial, err = getLastWALSerial(sc, localDeltaWALLastRevokedSerial) if err != nil { - return nil, nil, err + return nil, err } } @@ -1402,7 +1383,7 @@ func buildAnyLocalCRLs( if !isDelta { currDeltaCerts, err = sc.Backend.crlBuilder.getPresentLocalDeltaWALForClearing(sc) if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to get present delta WAL entries for removal: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to get present delta WAL entries for removal: %w", err) } } @@ -1417,7 +1398,7 @@ func buildAnyLocalCRLs( // a separate pool for those. unassignedCerts, revokedCertsMap, err = getLocalRevokedCertEntries(sc, issuerIDCertMap, isDelta) if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to get revoked certificate entries: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to get revoked certificate entries: %w", err) } if !isDelta { @@ -1430,7 +1411,7 @@ func buildAnyLocalCRLs( // duplicate this serial number on the delta, hence the above // guard for isDelta. if err := augmentWithRevokedIssuers(issuerIDEntryMap, issuerIDCertMap, revokedCertsMap); err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to parse revoked issuers: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to parse revoked issuers: %w", err) } } } @@ -1439,25 +1420,21 @@ func buildAnyLocalCRLs( // CRLs. 
internalCRLConfig, err := sc.getLocalCRLConfig() if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to fetch cluster-local CRL configuration: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to fetch cluster-local CRL configuration: %w", err) } - rebuildWarnings, err := buildAnyCRLsWithCerts(sc, issuersConfig, globalCRLConfig, internalCRLConfig, + if err := buildAnyCRLsWithCerts(sc, issuersConfig, globalCRLConfig, internalCRLConfig, issuers, issuerIDEntryMap, keySubjectIssuersMap, unassignedCerts, revokedCertsMap, - forceNew, false /* isUnified */, isDelta) - if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: %w", err) - } - if len(rebuildWarnings) > 0 { - warnings = append(warnings, rebuildWarnings...) + forceNew, false /* isUnified */, isDelta); err != nil { + return nil, fmt.Errorf("error building CRLs: %w", err) } // Finally, persist our potentially updated local CRL config. Only do this // if we didn't have a legacy CRL bundle. if !wasLegacy { if err := sc.setLocalCRLConfig(internalCRLConfig); err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to persist updated cluster-local CRL config: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to persist updated cluster-local CRL config: %w", err) } } @@ -1473,17 +1450,17 @@ func buildAnyLocalCRLs( lastDeltaBuildEntry, err := logical.StorageEntryJSON(localDeltaWALLastBuildSerial, deltaInfo) if err != nil { - return nil, nil, fmt.Errorf("error creating last delta CRL rebuild serial entry: %w", err) + return nil, fmt.Errorf("error creating last delta CRL rebuild serial entry: %w", err) } err = sc.Storage.Put(sc.Context, lastDeltaBuildEntry) if err != nil { - return nil, nil, fmt.Errorf("error persisting last delta CRL rebuild info: %w", err) + return nil, fmt.Errorf("error persisting last delta CRL rebuild info: %w", err) } } } - return currDeltaCerts, warnings, nil + return currDeltaCerts, nil } func buildAnyUnifiedCRLs( @@ -1497,20 +1474,19 @@ func buildAnyUnifiedCRLs( wasLegacy bool, forceNew bool, isDelta bool, -) ([]string, []string, error) { +) ([]string, error) { var err error - var warnings []string // Unified CRL can only be built by the main cluster. b := sc.Backend if b.System().ReplicationState().HasState(consts.ReplicationDRSecondary|consts.ReplicationPerformanceStandby) || (!b.System().LocalMount() && b.System().ReplicationState().HasState(consts.ReplicationPerformanceSecondary)) { - return nil, nil, nil + return nil, nil } // Unified CRL should only be built if enabled. 
if !globalCRLConfig.UnifiedCRL && !forceNew { - return nil, nil, nil + return nil, nil } // Before we load cert entries, we want to store the last seen delta WAL @@ -1524,14 +1500,14 @@ func buildAnyUnifiedCRLs( if isDelta { clusters, err := sc.Storage.List(sc.Context, unifiedDeltaWALPrefix) if err != nil { - return nil, nil, fmt.Errorf("error listing clusters for unified delta WAL building: %w", err) + return nil, fmt.Errorf("error listing clusters for unified delta WAL building: %w", err) } for index, cluster := range clusters { path := unifiedDeltaWALPrefix + cluster + deltaWALLastRevokedSerialName serial, err := getLastWALSerial(sc, path) if err != nil { - return nil, nil, fmt.Errorf("error getting last written Delta WAL serial for cluster (%v / %v): %w", index, cluster, err) + return nil, fmt.Errorf("error getting last written Delta WAL serial for cluster (%v / %v): %w", index, cluster, err) } lastDeltaSerial[cluster] = serial @@ -1545,7 +1521,7 @@ func buildAnyUnifiedCRLs( if !isDelta { currDeltaCerts, err = sc.Backend.crlBuilder.getPresentUnifiedDeltaWALForClearing(sc) if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to get present delta WAL entries for removal: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to get present delta WAL entries for removal: %w", err) } } @@ -1560,7 +1536,7 @@ func buildAnyUnifiedCRLs( // a separate pool for those. unassignedCerts, revokedCertsMap, err = getUnifiedRevokedCertEntries(sc, issuerIDCertMap, isDelta) if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to get revoked certificate entries: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to get revoked certificate entries: %w", err) } if !isDelta { @@ -1573,7 +1549,7 @@ func buildAnyUnifiedCRLs( // duplicate this serial number on the delta, hence the above // guard for isDelta. if err := augmentWithRevokedIssuers(issuerIDEntryMap, issuerIDCertMap, revokedCertsMap); err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to parse revoked issuers: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to parse revoked issuers: %w", err) } } } @@ -1582,25 +1558,21 @@ func buildAnyUnifiedCRLs( // CRLs. internalCRLConfig, err := sc.getUnifiedCRLConfig() if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to fetch cluster-local CRL configuration: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to fetch cluster-local CRL configuration: %w", err) } - rebuildWarnings, err := buildAnyCRLsWithCerts(sc, issuersConfig, globalCRLConfig, internalCRLConfig, + if err := buildAnyCRLsWithCerts(sc, issuersConfig, globalCRLConfig, internalCRLConfig, issuers, issuerIDEntryMap, keySubjectIssuersMap, unassignedCerts, revokedCertsMap, - forceNew, true /* isUnified */, isDelta) - if err != nil { - return nil, nil, fmt.Errorf("error building CRLs: %w", err) - } - if len(rebuildWarnings) > 0 { - warnings = append(warnings, rebuildWarnings...) + forceNew, true /* isUnified */, isDelta); err != nil { + return nil, fmt.Errorf("error building CRLs: %w", err) } // Finally, persist our potentially updated local CRL config. Only do this // if we didn't have a legacy CRL bundle. 
if !wasLegacy { if err := sc.setUnifiedCRLConfig(internalCRLConfig); err != nil { - return nil, nil, fmt.Errorf("error building CRLs: unable to persist updated cluster-local CRL config: %w", err) + return nil, fmt.Errorf("error building CRLs: unable to persist updated cluster-local CRL config: %w", err) } } @@ -1624,17 +1596,17 @@ func buildAnyUnifiedCRLs( deltaInfo := lastDeltaInfo{Serial: serial} lastDeltaBuildEntry, err := logical.StorageEntryJSON(path, deltaInfo) if err != nil { - return nil, nil, fmt.Errorf("error creating last delta CRL rebuild serial entry: %w", err) + return nil, fmt.Errorf("error creating last delta CRL rebuild serial entry: %w", err) } err = sc.Storage.Put(sc.Context, lastDeltaBuildEntry) if err != nil { - return nil, nil, fmt.Errorf("error persisting last delta CRL rebuild info: %w", err) + return nil, fmt.Errorf("error persisting last delta CRL rebuild info: %w", err) } } } - return currDeltaCerts, warnings, nil + return currDeltaCerts, nil } func buildAnyCRLsWithCerts( @@ -1650,10 +1622,9 @@ func buildAnyCRLsWithCerts( forceNew bool, isUnified bool, isDelta bool, -) ([]string, error) { +) error { // Now we can call buildCRL once, on an arbitrary/representative issuer // from each of these (keyID, subject) sets. - var warnings []string for _, subjectIssuersMap := range keySubjectIssuersMap { for _, issuersSet := range subjectIssuersMap { if len(issuersSet) == 0 { @@ -1701,7 +1672,7 @@ func buildAnyCRLsWithCerts( // Finally, check our crlIdentifier. if thisCRLId, ok := internalCRLConfig.IssuerIDCRLMap[issuerId]; ok && len(thisCRLId) > 0 { if len(crlIdentifier) > 0 && crlIdentifier != thisCRLId { - return nil, fmt.Errorf("error building CRLs: two issuers with same keys/subjects (%v vs %v) have different internal CRL IDs: %v vs %v", issuerId, crlIdIssuer, thisCRLId, crlIdentifier) + return fmt.Errorf("error building CRLs: two issuers with same keys/subjects (%v vs %v) have different internal CRL IDs: %v vs %v", issuerId, crlIdIssuer, thisCRLId, crlIdentifier) } crlIdentifier = thisCRLId @@ -1713,24 +1684,6 @@ func buildAnyCRLsWithCerts( // Skip this set for the time being; while we have valid // issuers and associated keys, this occurred because we lack // crl-signing usage on all issuers in this set. - // - // But, tell the user about this, so they can either correct - // this by reissuing the CA certificate or adding an equivalent - // version with KU bits if the CA cert lacks KU altogether. - // - // See also: https://github.com/hashicorp/vault/issues/20137 - warning := "Issuer equivalency set with associated keys lacked an issuer with CRL Signing KeyUsage; refusing to rebuild CRL for this group of issuers: " - var issuers []string - for _, issuerId := range issuersSet { - issuers = append(issuers, issuerId.String()) - } - warning += strings.Join(issuers, ",") - - // We only need this warning once. :-) - if !isUnified && !isDelta { - warnings = append(warnings, warning) - } - continue } @@ -1771,7 +1724,7 @@ func buildAnyCRLsWithCerts( // Lastly, build the CRL. 
nextUpdate, err := buildCRL(sc, globalCRLConfig, forceNew, representative, revokedCerts, crlIdentifier, crlNumber, isUnified, isDelta, lastCompleteNumber) if err != nil { - return nil, fmt.Errorf("error building CRLs: unable to build CRL for issuer (%v): %w", representative, err) + return fmt.Errorf("error building CRLs: unable to build CRL for issuer (%v): %w", representative, err) } internalCRLConfig.CRLExpirationMap[crlIdentifier] = *nextUpdate @@ -1820,13 +1773,13 @@ func buildAnyCRLsWithCerts( if !stillHaveIssuerForID { if err := sc.Storage.Delete(sc.Context, "crls/"+crlId.String()); err != nil { - return nil, fmt.Errorf("error building CRLs: unable to clean up deleted issuers' CRL: %w", err) + return fmt.Errorf("error building CRLs: unable to clean up deleted issuers' CRL: %w", err) } } } // All good :-) - return warnings, nil + return nil } func isRevInfoIssuerValid(revInfo *revocationInfo, issuerIDCertMap map[issuerID]*x509.Certificate) bool { diff --git a/builtin/logical/pki/dnstest/server.go b/builtin/logical/pki/dnstest/server.go deleted file mode 100644 index 87091ed5eb60ec..00000000000000 --- a/builtin/logical/pki/dnstest/server.go +++ /dev/null @@ -1,415 +0,0 @@ -package dnstest - -import ( - "context" - "fmt" - "net" - "strings" - "sync" - "testing" - "time" - - "github.com/hashicorp/vault/helper/testhelpers/corehelpers" - "github.com/hashicorp/vault/sdk/helper/docker" - "github.com/stretchr/testify/require" -) - -type TestServer struct { - t *testing.T - ctx context.Context - - runner *docker.Runner - network string - startup *docker.Service - - lock sync.Mutex - serial int - forwarders []string - domains []string - records map[string]map[string][]string // domain -> record -> value(s). - - cleanup func() -} - -func SetupResolver(t *testing.T, domain string) *TestServer { - return SetupResolverOnNetwork(t, domain, "") -} - -func SetupResolverOnNetwork(t *testing.T, domain string, network string) *TestServer { - var ts TestServer - ts.t = t - ts.ctx = context.Background() - ts.domains = []string{domain} - ts.records = map[string]map[string][]string{} - ts.network = network - - ts.setupRunner(domain, network) - ts.startContainer(network) - ts.PushConfig() - - return &ts -} - -func (ts *TestServer) setupRunner(domain string, network string) { - var err error - ts.runner, err = docker.NewServiceRunner(docker.RunOptions{ - ImageRepo: "ubuntu/bind9", - ImageTag: "latest", - ContainerName: "bind9-dns-" + strings.ReplaceAll(domain, ".", "-"), - NetworkName: network, - Ports: []string{"53/udp"}, - LogConsumer: func(s string) { - ts.t.Logf(s) - }, - }) - require.NoError(ts.t, err) -} - -func (ts *TestServer) startContainer(network string) { - connUpFunc := func(ctx context.Context, host string, port int) (docker.ServiceConfig, error) { - // Perform a simple connection to this resolver, even though the - // default configuration doesn't do anything useful. - peer, err := net.ResolveUDPAddr("udp", fmt.Sprintf("%s:%d", host, port)) - if err != nil { - return nil, fmt.Errorf("failed to resolve peer: %v / %v: %w", host, port, err) - } - - conn, err := net.DialUDP("udp", nil, peer) - if err != nil { - return nil, fmt.Errorf("failed to dial peer: %v / %v / %v: %w", host, port, peer, err) - } - defer conn.Close() - - _, err = conn.Write([]byte("garbage-in")) - if err != nil { - return nil, fmt.Errorf("failed to write to peer: %v / %v / %v: %w", host, port, peer, err) - } - - // Connection worked. 
- return docker.NewServiceHostPort(host, port), nil - } - - result, _, err := ts.runner.StartNewService(ts.ctx, true, true, connUpFunc) - require.NoError(ts.t, err, "failed to start dns resolver for "+ts.domains[0]) - ts.startup = result - - if ts.startup.StartResult.RealIP == "" { - mapping, err := ts.runner.GetNetworkAndAddresses(ts.startup.Container.ID) - require.NoError(ts.t, err, "failed to fetch network addresses to correct missing real IP address") - if len(network) == 0 { - require.Equal(ts.t, 1, len(mapping), "expected exactly one network address") - for network = range mapping { - // Because mapping is a map of network name->ip, we need - // to use the above range's assignment to get the name, - // as there is no other way of getting the keys of a map. - } - } - require.Contains(ts.t, mapping, network, "expected network to be part of the mapping") - ts.startup.StartResult.RealIP = mapping[network] - } - - ts.t.Logf("[dnsserv] Addresses of DNS resolver: local=%v / container=%v", ts.GetLocalAddr(), ts.GetRemoteAddr()) -} - -func (ts *TestServer) buildNamedConf() string { - forwarders := "\n" - if len(ts.forwarders) > 0 { - forwarders = "\tforwarders {\n" - for _, forwarder := range ts.forwarders { - forwarders += "\t\t" + forwarder + ";\n" - } - forwarders += "\t};\n" - } - - zones := "\n" - for _, domain := range ts.domains { - zones += fmt.Sprintf("zone \"%s\" {\n", domain) - zones += "\ttype primary;\n" - zones += fmt.Sprintf("\tfile \"%s.zone\";\n", domain) - zones += "\tallow-update {\n\t\tnone;\n\t};\n" - zones += "\tnotify no;\n" - zones += "};\n\n" - } - - // Reverse lookups are not handles as they're not presently necessary. - - cfg := `options { - directory "/var/cache/bind"; - - dnssec-validation no; - - ` + forwarders + ` -}; - -` + zones - - return cfg -} - -func (ts *TestServer) buildZoneFile(target string) string { - // One second TTL by default to allow quick refreshes. 
- zone := "$TTL 1;\n" - - ts.serial += 1 - zone += fmt.Sprintf("@\tIN\tSOA\tns.%v.\troot.%v.\t(\n", target, target) - zone += fmt.Sprintf("\t\t\t%d;\n\t\t\t1;\n\t\t\t1;\n\t\t\t2;\n\t\t\t1;\n\t\t\t)\n\n", ts.serial) - zone += fmt.Sprintf("@\tIN\tNS\tns%d.%v.\n", ts.serial, target) - zone += fmt.Sprintf("ns%d.%v.\tIN\tA\t%v\n", ts.serial, target, "127.0.0.1") - - for domain, records := range ts.records { - if !strings.HasSuffix(domain, target) { - continue - } - - for recordType, values := range records { - for _, value := range values { - zone += fmt.Sprintf("%s.\tIN\t%s\t%s\n", domain, recordType, value) - } - } - } - - return zone -} - -func (ts *TestServer) pushNamedConf() { - contents := docker.NewBuildContext() - cfgPath := "/etc/bind/named.conf.options" - namedCfg := ts.buildNamedConf() - contents[cfgPath] = docker.PathContentsFromString(namedCfg) - contents[cfgPath].SetOwners(0, 142) // root, bind - - ts.t.Logf("Generated bind9 config (%s):\n%v\n", cfgPath, namedCfg) - - err := ts.runner.CopyTo(ts.startup.Container.ID, "/", contents) - require.NoError(ts.t, err, "failed pushing updated named.conf.options to container") -} - -func (ts *TestServer) pushZoneFiles() { - contents := docker.NewBuildContext() - - for _, domain := range ts.domains { - path := "/var/cache/bind/" + domain + ".zone" - zoneFile := ts.buildZoneFile(domain) - contents[path] = docker.PathContentsFromString(zoneFile) - contents[path].SetOwners(0, 142) // root, bind - - ts.t.Logf("Generated bind9 zone file for %v (%s):\n%v\n", domain, path, zoneFile) - } - - err := ts.runner.CopyTo(ts.startup.Container.ID, "/", contents) - require.NoError(ts.t, err, "failed pushing updated named.conf.options to container") -} - -func (ts *TestServer) PushConfig() { - ts.lock.Lock() - defer ts.lock.Unlock() - - // There's two cases here: - // - // 1. We've added a new top-level domain name. Here, we want to make - // sure the new zone file is pushed before we push the reference - // to it. - // 2. We've just added a new. Here, the order doesn't matter, but - // mostly likely the second push will be a no-op. - ts.pushZoneFiles() - ts.pushNamedConf() - - // Wait until our config has taken. - corehelpers.RetryUntil(ts.t, 15*time.Second, func() error { - // bind reloads based on file mtime, touch files before starting - // to make sure it has been updated more recently than when the - // last update was written. Then issue a new SIGHUP. - for _, domain := range ts.domains { - path := "/var/cache/bind/" + domain + ".zone" - touchCmd := []string{"touch", path} - - _, _, _, err := ts.runner.RunCmdWithOutput(ts.ctx, ts.startup.Container.ID, touchCmd) - if err != nil { - return fmt.Errorf("failed to update zone mtime: %w", err) - } - } - ts.runner.DockerAPI.ContainerKill(ts.ctx, ts.startup.Container.ID, "SIGHUP") - - // Connect to our bind resolver. - resolver := &net.Resolver{ - PreferGo: true, - StrictErrors: false, - Dial: func(ctx context.Context, network, address string) (net.Conn, error) { - d := net.Dialer{ - Timeout: 10 * time.Second, - } - return d.DialContext(ctx, network, ts.GetLocalAddr()) - }, - } - - // last domain has the given serial number, which also appears in the - // NS record so we can fetch it via Go. 
- lastDomain := ts.domains[len(ts.domains)-1] - records, err := resolver.LookupNS(ts.ctx, lastDomain) - if err != nil { - return fmt.Errorf("failed to lookup NS record for %v: %w", lastDomain, err) - } - - if len(records) != 1 { - return fmt.Errorf("expected only 1 NS record for %v, got %v/%v", lastDomain, len(records), records) - } - - expectedNS := fmt.Sprintf("ns%d.%v.", ts.serial, lastDomain) - if records[0].Host != expectedNS { - return fmt.Errorf("expected to find NS %v, got %v indicating reload hadn't completed", expectedNS, records[0]) - } - - return nil - }) -} - -func (ts *TestServer) GetLocalAddr() string { - return ts.startup.Config.Address() -} - -func (ts *TestServer) GetRemoteAddr() string { - return fmt.Sprintf("%s:%d", ts.startup.StartResult.RealIP, 53) -} - -func (ts *TestServer) AddDomain(domain string) { - ts.lock.Lock() - defer ts.lock.Unlock() - - for _, existing := range ts.domains { - if existing == domain { - return - } - } - - ts.domains = append(ts.domains, domain) -} - -func (ts *TestServer) AddRecord(domain string, record string, value string) { - ts.lock.Lock() - defer ts.lock.Unlock() - - foundDomain := false - for _, existing := range ts.domains { - if strings.HasSuffix(domain, existing) { - foundDomain = true - break - } - } - if !foundDomain { - ts.t.Fatalf("cannot add record %v/%v :: [%v] -- no domain zone matching (%v)", record, domain, value, ts.domains) - } - - value = strings.TrimSpace(value) - if _, present := ts.records[domain]; !present { - ts.records[domain] = map[string][]string{} - } - - if values, present := ts.records[domain][record]; present { - for _, candidate := range values { - if candidate == value { - // Already present; skip adding. - return - } - } - } - - ts.records[domain][record] = append(ts.records[domain][record], value) -} - -func (ts *TestServer) RemoveRecord(domain string, record string, value string) { - ts.lock.Lock() - defer ts.lock.Unlock() - - foundDomain := false - for _, existing := range ts.domains { - if strings.HasSuffix(domain, existing) { - foundDomain = true - break - } - } - if !foundDomain { - // Not found. - return - } - - value = strings.TrimSpace(value) - if _, present := ts.records[domain]; !present { - // Not found. - return - } - - var remaining []string - if values, present := ts.records[domain][record]; present { - for _, candidate := range values { - if candidate != value { - remaining = append(remaining, candidate) - } - } - } - - ts.records[domain][record] = remaining -} - -func (ts *TestServer) RemoveRecordsOfTypeForDomain(domain string, record string) { - ts.lock.Lock() - defer ts.lock.Unlock() - - foundDomain := false - for _, existing := range ts.domains { - if strings.HasSuffix(domain, existing) { - foundDomain = true - break - } - } - if !foundDomain { - // Not found. - return - } - - if _, present := ts.records[domain]; !present { - // Not found. - return - } - - delete(ts.records[domain], record) -} - -func (ts *TestServer) RemoveRecordsForDomain(domain string) { - ts.lock.Lock() - defer ts.lock.Unlock() - - foundDomain := false - for _, existing := range ts.domains { - if strings.HasSuffix(domain, existing) { - foundDomain = true - break - } - } - if !foundDomain { - // Not found. - return - } - - if _, present := ts.records[domain]; !present { - // Not found. 
- return - } - - ts.records[domain] = map[string][]string{} -} - -func (ts *TestServer) RemoveAllRecords() { - ts.lock.Lock() - defer ts.lock.Unlock() - - ts.records = map[string]map[string][]string{} -} - -func (ts *TestServer) Cleanup() { - if ts.cleanup != nil { - ts.cleanup() - } - if ts.startup != nil && ts.startup.Cleanup != nil { - ts.startup.Cleanup() - } -} diff --git a/builtin/logical/pki/fields.go b/builtin/logical/pki/fields.go index b29353985e2161..c5c68bd3783a9d 100644 --- a/builtin/logical/pki/fields.go +++ b/builtin/logical/pki/fields.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -492,23 +489,6 @@ this removes ALL issuers within the mount (and is thus not desirable in most operational scenarios).`, } - fields["tidy_acme"] = &framework.FieldSchema{ - Type: framework.TypeBool, - Description: `Set to true to enable tidying ACME accounts, -orders and authorizations. ACME orders are tidied (deleted) -safety_buffer after the certificate associated with them expires, -or after the order and relevant authorizations have expired if no -certificate was produced. Authorizations are tidied with the -corresponding order. - -When a valid ACME Account is at least acme_account_safety_buffer -old, and has no remaining orders associated with it, the account is -marked as revoked. After another acme_account_safety_buffer has -passed from the revocation or deactivation date, a revoked or -deactivated ACME account is deleted.`, - Default: false, - } - fields["safety_buffer"] = &framework.FieldSchema{ Type: framework.TypeDurationSecond, Description: `The amount of extra time that must have passed @@ -527,14 +507,6 @@ Defaults to 8760 hours (1 year).`, Default: int(defaultTidyConfig.IssuerSafetyBuffer / time.Second), // TypeDurationSecond currently requires defaults to be int } - fields["acme_account_safety_buffer"] = &framework.FieldSchema{ - Type: framework.TypeDurationSecond, - Description: `The amount of time that must pass after creation -that an account with no orders is marked revoked, and the amount of time -after being marked revoked or deactivated.`, - Default: int(defaultTidyConfig.AcmeAccountSafetyBuffer / time.Second), // TypeDurationSecond currently requires defaults to be int - } - fields["pause_duration"] = &framework.FieldSchema{ Type: framework.TypeString, Description: `The amount of time to wait between processing @@ -574,72 +546,3 @@ primary node.`, return fields } - -// generate the entire list of schema fields we need for CSR sign verbatim, this is also -// leveraged by ACME internally. -func getCsrSignVerbatimSchemaFields() map[string]*framework.FieldSchema { - fields := map[string]*framework.FieldSchema{} - fields = addNonCACommonFields(fields) - fields = addSignVerbatimRoleFields(fields) - - fields["csr"] = &framework.FieldSchema{ - Type: framework.TypeString, - Default: "", - Description: `PEM-format CSR to be signed. Values will be -taken verbatim from the CSR, except for -basic constraints.`, - } - - return fields -} - -// addSignVerbatimRoleFields provides the fields and defaults to be used by anything that is building up the fields -// and their corresponding default values when generating/using a sign-verbatim type role such as buildSignVerbatimRole. 
-func addSignVerbatimRoleFields(fields map[string]*framework.FieldSchema) map[string]*framework.FieldSchema { - fields["key_usage"] = &framework.FieldSchema{ - Type: framework.TypeCommaStringSlice, - Default: []string{"DigitalSignature", "KeyAgreement", "KeyEncipherment"}, - Description: `A comma-separated string or list of key usages (not extended -key usages). Valid values can be found at -https://golang.org/pkg/crypto/x509/#KeyUsage --- simply drop the "KeyUsage" part of the name. -To remove all key usages from being set, set -this value to an empty list.`, - } - - fields["ext_key_usage"] = &framework.FieldSchema{ - Type: framework.TypeCommaStringSlice, - Default: []string{}, - Description: `A comma-separated string or list of extended key usages. Valid values can be found at -https://golang.org/pkg/crypto/x509/#ExtKeyUsage --- simply drop the "ExtKeyUsage" part of the name. -To remove all key usages from being set, set -this value to an empty list.`, - } - - fields["ext_key_usage_oids"] = &framework.FieldSchema{ - Type: framework.TypeCommaStringSlice, - Description: `A comma-separated string or list of extended key usage oids.`, - } - - fields["signature_bits"] = &framework.FieldSchema{ - Type: framework.TypeInt, - Default: 0, - Description: `The number of bits to use in the signature -algorithm; accepts 256 for SHA-2-256, 384 for SHA-2-384, and 512 for -SHA-2-512. Defaults to 0 to automatically detect based on key length -(SHA-2-256 for RSA keys, and matching the curve size for NIST P-Curves).`, - DisplayAttrs: &framework.DisplayAttributes{ - Value: 0, - }, - } - - fields["use_pss"] = &framework.FieldSchema{ - Type: framework.TypeBool, - Default: false, - Description: `Whether or not to use PSS signatures when using a -RSA key-type issuer. Defaults to false.`, - } - - return fields -} diff --git a/builtin/logical/pki/integration_test.go b/builtin/logical/pki/integration_test.go index f6e9ec6482593d..c2bdffbde3fbab 100644 --- a/builtin/logical/pki/integration_test.go +++ b/builtin/logical/pki/integration_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -14,14 +11,7 @@ import ( "fmt" "testing" - "github.com/hashicorp/vault/api" - vaulthttp "github.com/hashicorp/vault/http" - vaultocsp "github.com/hashicorp/vault/sdk/helper/ocsp" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/vault" - - "github.com/hashicorp/go-hclog" "github.com/stretchr/testify/require" ) @@ -381,7 +371,6 @@ func TestIntegration_AutoIssuer(t *testing.T) { "issuer_name": "root-1", "key_type": "ec", }) - requireSuccessNonNilResponse(t, resp, err) issuerIdOne := resp.Data["issuer_id"] require.NotEmpty(t, issuerIdOne) @@ -392,15 +381,12 @@ func TestIntegration_AutoIssuer(t *testing.T) { requireSuccessNonNilResponse(t, resp, err) require.Equal(t, issuerIdOne, resp.Data["default"]) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/issuers"), logical.ReadOperation), resp, true) - // Enable the new config option. - resp, err = CBWrite(b, s, "config/issuers", map[string]interface{}{ + _, err = CBWrite(b, s, "config/issuers", map[string]interface{}{ "default": issuerIdOne, "default_follows_latest_issuer": true, }) require.NoError(t, err) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/issuers"), logical.UpdateOperation), resp, true) // Now generate the second root; it should become default. 
resp, err = CBWrite(b, s, "root/generate/internal", map[string]interface{}{ @@ -474,169 +460,6 @@ func TestIntegration_AutoIssuer(t *testing.T) { require.Equal(t, issuerIdOneReimported, resp.Data["default"]) } -func TestIntegrationOCSPClientWithPKI(t *testing.T) { - t.Parallel() - - coreConfig := &vault.CoreConfig{ - LogicalBackends: map[string]logical.Factory{ - "pki": Factory, - }, - } - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - - cluster.Start() - defer cluster.Cleanup() - cores := cluster.Cores - vault.TestWaitActive(t, cores[0].Core) - client := cores[0].Client - - err := client.Sys().Mount("pki", &api.MountInput{ - Type: "pki", - Config: api.MountConfigInput{ - DefaultLeaseTTL: "16h", - MaxLeaseTTL: "32h", - }, - }) - require.NoError(t, err) - - resp, err := client.Logical().Write("pki/root/generate/internal", map[string]interface{}{ - "ttl": "40h", - "common_name": "Root R1", - "key_type": "ec", - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["issuer_id"]) - rootIssuerId := resp.Data["issuer_id"].(string) - - // Set URLs pointing to the issuer. - _, err = client.Logical().Write("pki/config/cluster", map[string]interface{}{ - "path": client.Address() + "/v1/pki", - "aia_path": client.Address() + "/v1/pki", - }) - require.NoError(t, err) - - _, err = client.Logical().Write("pki/config/urls", map[string]interface{}{ - "enable_templating": true, - "crl_distribution_points": "{{cluster_aia_path}}/issuer/{{issuer_id}}/crl/der", - "issuing_certificates": "{{cluster_aia_path}}/issuer/{{issuer_id}}/der", - "ocsp_servers": "{{cluster_aia_path}}/ocsp", - }) - require.NoError(t, err) - - // Build an intermediate CA - resp, err = client.Logical().Write("pki/intermediate/generate/internal", map[string]interface{}{ - "common_name": "Int X1", - "key_type": "ec", - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["csr"]) - intermediateCSR := resp.Data["csr"].(string) - - resp, err = client.Logical().Write("pki/root/sign-intermediate", map[string]interface{}{ - "csr": intermediateCSR, - "ttl": "20h", - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["certificate"]) - intermediateCert := resp.Data["certificate"] - - resp, err = client.Logical().Write("pki/intermediate/set-signed", map[string]interface{}{ - "certificate": intermediateCert, - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["imported_issuers"]) - rawImportedIssuers := resp.Data["imported_issuers"].([]interface{}) - require.Equal(t, len(rawImportedIssuers), 1) - importedIssuer := rawImportedIssuers[0].(string) - require.NotEmpty(t, importedIssuer) - - // Set intermediate as default. - _, err = client.Logical().Write("pki/config/issuers", map[string]interface{}{ - "default": importedIssuer, - }) - require.NoError(t, err) - - // Setup roles for root, intermediate. 
- _, err = client.Logical().Write("pki/roles/example-root", map[string]interface{}{ - "allowed_domains": "example.com", - "allow_subdomains": "true", - "max_ttl": "1h", - "key_type": "ec", - "issuer_ref": rootIssuerId, - }) - require.NoError(t, err) - - _, err = client.Logical().Write("pki/roles/example-int", map[string]interface{}{ - "allowed_domains": "example.com", - "allow_subdomains": "true", - "max_ttl": "1h", - "key_type": "ec", - }) - require.NoError(t, err) - - // Issue certs and validate them against OCSP. - for _, path := range []string{"pki/issue/example-int", "pki/issue/example-root"} { - t.Logf("Validating against path: %v", path) - resp, err = client.Logical().Write(path, map[string]interface{}{ - "common_name": "test.example.com", - "ttl": "5m", - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["certificate"]) - require.NotEmpty(t, resp.Data["issuing_ca"]) - require.NotEmpty(t, resp.Data["serial_number"]) - - certPEM := resp.Data["certificate"].(string) - certBlock, _ := pem.Decode([]byte(certPEM)) - require.NotNil(t, certBlock) - cert, err := x509.ParseCertificate(certBlock.Bytes) - require.NoError(t, err) - require.NotNil(t, cert) - - issuerPEM := resp.Data["issuing_ca"].(string) - issuerBlock, _ := pem.Decode([]byte(issuerPEM)) - require.NotNil(t, issuerBlock) - issuer, err := x509.ParseCertificate(issuerBlock.Bytes) - require.NoError(t, err) - require.NotNil(t, issuer) - - serialNumber := resp.Data["serial_number"].(string) - - testLogger := hclog.New(hclog.DefaultOptions) - - conf := &vaultocsp.VerifyConfig{ - OcspFailureMode: vaultocsp.FailOpenFalse, - ExtraCas: []*x509.Certificate{cluster.CACert}, - } - ocspClient := vaultocsp.New(func() hclog.Logger { - return testLogger - }, 10) - - err = ocspClient.VerifyLeafCertificate(context.Background(), cert, issuer, conf) - require.NoError(t, err) - - _, err = client.Logical().Write("pki/revoke", map[string]interface{}{ - "serial_number": serialNumber, - }) - require.NoError(t, err) - - err = ocspClient.VerifyLeafCertificate(context.Background(), cert, issuer, conf) - require.Error(t, err) - } -} - func genTestRootCa(t *testing.T, b *backend, s logical.Storage) (issuerID, keyID) { return genTestRootCaWithIssuerName(t, b, s, "") } diff --git a/builtin/logical/pki/key_util.go b/builtin/logical/pki/key_util.go index 5f2d19c65dad4b..c40831714625e4 100644 --- a/builtin/logical/pki/key_util.go +++ b/builtin/logical/pki/key_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/managed_key_util.go b/builtin/logical/pki/managed_key_util.go index 42e031deceb312..29ab43381329e4 100644 --- a/builtin/logical/pki/managed_key_util.go +++ b/builtin/logical/pki/managed_key_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package pki diff --git a/builtin/logical/pki/path_acme_account.go b/builtin/logical/pki/path_acme_account.go deleted file mode 100644 index 36801df151bff3..00000000000000 --- a/builtin/logical/pki/path_acme_account.go +++ /dev/null @@ -1,474 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "fmt" - "net/http" - "path" - "strings" - "time" - - "github.com/hashicorp/go-secure-stdlib/strutil" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -func uuidNameRegex(name string) string { - return fmt.Sprintf("(?P<%s>[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}?)", name) -} - -func pathAcmeNewAccount(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeNewAccount, "/new-account") -} - -func pathAcmeUpdateAccount(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeNewAccount, "/account/"+uuidNameRegex("kid")) -} - -func addFieldsForACMEPath(fields map[string]*framework.FieldSchema, pattern string) map[string]*framework.FieldSchema { - if strings.Contains(pattern, framework.GenericNameRegex("role")) { - fields["role"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: `The desired role for the acme request`, - Required: true, - } - } - if strings.Contains(pattern, framework.GenericNameRegex(issuerRefParam)) { - fields[issuerRefParam] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: `Reference to an existing issuer name or issuer id`, - Required: true, - } - } - - return fields -} - -func addFieldsForACMERequest(fields map[string]*framework.FieldSchema) map[string]*framework.FieldSchema { - fields["protected"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: "ACME request 'protected' value", - Required: false, - } - - fields["payload"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: "ACME request 'payload' value", - Required: false, - } - - fields["signature"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: "ACME request 'signature' value", - Required: false, - } - - return fields -} - -func addFieldsForACMEKidRequest(fields map[string]*framework.FieldSchema, pattern string) map[string]*framework.FieldSchema { - if strings.Contains(pattern, uuidNameRegex("kid")) { - fields["kid"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: `The key identifier provided by the CA`, - Required: true, - } - } - - return fields -} - -func patternAcmeNewAccount(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - addFieldsForACMEKidRequest(fields, pattern) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeParsedWrapper(b.acmeNewAccountHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func (b *backend) acmeNewAccountHandler(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}) (*logical.Response, error) { - // Parameters - var ok bool - var onlyReturnExisting bool - var contacts []string - var termsOfServiceAgreed bool - var status string - var eabData map[string]interface{} - - rawContact, present := data["contact"] - if present { - listContact, ok := rawContact.([]interface{}) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for field 'contact': %w", rawContact, ErrMalformed) - } - - for index, singleContact := range listContact 
{ - contact, ok := singleContact.(string) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for field 'contact' item %d: %w", singleContact, index, ErrMalformed) - } - - contacts = append(contacts, contact) - } - } - - rawTermsOfServiceAgreed, present := data["termsOfServiceAgreed"] - if present { - termsOfServiceAgreed, ok = rawTermsOfServiceAgreed.(bool) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for field 'termsOfServiceAgreed': %w", rawTermsOfServiceAgreed, ErrMalformed) - } - } - - rawOnlyReturnExisting, present := data["onlyReturnExisting"] - if present { - onlyReturnExisting, ok = rawOnlyReturnExisting.(bool) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for field 'onlyReturnExisting': %w", rawOnlyReturnExisting, ErrMalformed) - } - } - - // Per RFC 8555 7.3.6 Account deactivation, we will handle it within our update API. - rawStatus, present := data["status"] - if present { - status, ok = rawStatus.(string) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for field 'onlyReturnExisting': %w", rawOnlyReturnExisting, ErrMalformed) - } - } - - if eabDataRaw, ok := data["externalAccountBinding"]; ok { - eabData, ok = eabDataRaw.(map[string]interface{}) - if !ok { - return nil, fmt.Errorf("%w: externalAccountBinding field was unparseable", ErrMalformed) - } - } - - // We have two paths here: search or create. - if onlyReturnExisting { - return b.acmeAccountSearchHandler(acmeCtx, userCtx) - } - - // Pass through the /new-account API calls to this specific handler as its requirements are different - // from the account update handler. - if strings.HasSuffix(r.Path, "/new-account") { - return b.acmeNewAccountCreateHandler(acmeCtx, userCtx, contacts, termsOfServiceAgreed, r, eabData) - } - - return b.acmeNewAccountUpdateHandler(acmeCtx, userCtx, contacts, status, eabData) -} - -func formatNewAccountResponse(acmeCtx *acmeContext, acct *acmeAccount, eabData map[string]interface{}) *logical.Response { - resp := formatAccountResponse(acmeCtx, acct) - - // Per RFC 8555 Section 7.1.2. Account Objects - // Including this field in a newAccount request indicates approval by - // the holder of an existing non-ACME account to bind that account to - // this ACME account - if acct.Eab != nil && len(eabData) != 0 { - resp.Data["externalAccountBinding"] = eabData - } - - return resp -} - -func formatAccountResponse(acmeCtx *acmeContext, acct *acmeAccount) *logical.Response { - location := acmeCtx.baseUrl.String() + "account/" + acct.KeyId - - resp := &logical.Response{ - Data: map[string]interface{}{ - "status": acct.Status, - "orders": location + "/orders", - }, - Headers: map[string][]string{ - "Location": {location}, - }, - } - - if len(acct.Contact) > 0 { - resp.Data["contact"] = acct.Contact - } - - return resp -} - -func (b *backend) acmeAccountSearchHandler(acmeCtx *acmeContext, userCtx *jwsCtx) (*logical.Response, error) { - thumbprint, err := userCtx.GetKeyThumbprint() - if err != nil { - return nil, fmt.Errorf("failed generating thumbprint for key: %w", err) - } - - account, err := b.acmeState.LoadAccountByKey(acmeCtx, thumbprint) - if err != nil { - return nil, fmt.Errorf("failed to load account by thumbprint: %w", err) - } - - if account != nil { - if err = acmeCtx.eabPolicy.EnforceForExistingAccount(account); err != nil { - return nil, err - } - return formatAccountResponse(acmeCtx, account), nil - } - - // Per RFC 8555 Section 7.3.1. 
Finding an Account URL Given a Key: - // - // > If a client sends such a request and an account does not exist, - // > then the server MUST return an error response with status code - // > 400 (Bad Request) and type "urn:ietf:params:acme:error:accountDoesNotExist". - return nil, fmt.Errorf("An account with this key does not exist: %w", ErrAccountDoesNotExist) -} - -func (b *backend) acmeNewAccountCreateHandler(acmeCtx *acmeContext, userCtx *jwsCtx, contact []string, termsOfServiceAgreed bool, r *logical.Request, eabData map[string]interface{}) (*logical.Response, error) { - if userCtx.Existing { - return nil, fmt.Errorf("cannot submit to newAccount with 'kid': %w", ErrMalformed) - } - - // If the account already exists, return the existing one. - thumbprint, err := userCtx.GetKeyThumbprint() - if err != nil { - return nil, fmt.Errorf("failed generating thumbprint for key: %w", err) - } - - accountByKey, err := b.acmeState.LoadAccountByKey(acmeCtx, thumbprint) - if err != nil { - return nil, fmt.Errorf("failed to load account by thumbprint: %w", err) - } - - if accountByKey != nil { - if err = acmeCtx.eabPolicy.EnforceForExistingAccount(accountByKey); err != nil { - return nil, err - } - return formatAccountResponse(acmeCtx, accountByKey), nil - } - - var eab *eabType - if len(eabData) != 0 { - eab, err = verifyEabPayload(b.acmeState, acmeCtx, userCtx, r.Path, eabData) - if err != nil { - return nil, err - } - } - - // Verify against our EAB policy - if err = acmeCtx.eabPolicy.EnforceForNewAccount(eab); err != nil { - return nil, err - } - - // TODO: Limit this only when ToS are required or set by the operator, since we don't have a - // ToS URL in the directory at the moment, we can not enforce this. - //if !termsOfServiceAgreed { - // return nil, fmt.Errorf("terms of service not agreed to: %w", ErrUserActionRequired) - //} - - if eab != nil { - // We delete the EAB to prevent future re-use after associating it with an account, worst - // case if we fail creating the account we simply nuked the EAB which they can create another - // and retry - wasDeleted, err := b.acmeState.DeleteEab(acmeCtx.sc, eab.KeyID) - if err != nil { - return nil, fmt.Errorf("failed to delete eab reference: %w", err) - } - - if !wasDeleted { - // Something consumed our EAB before we did bail... - return nil, fmt.Errorf("eab was already used: %w", ErrUnauthorized) - } - } - - b.acmeAccountLock.RLock() // Prevents Account Creation and Tidy Interfering - defer b.acmeAccountLock.RUnlock() - - accountByKid, err := b.acmeState.CreateAccount(acmeCtx, userCtx, contact, termsOfServiceAgreed, eab) - if err != nil { - if eab != nil { - return nil, fmt.Errorf("failed to create account: %w; the EAB key used for this request has been deleted as a result of this operation; fetch a new EAB key before retrying", err) - } - return nil, fmt.Errorf("failed to create account: %w", err) - } - - resp := formatNewAccountResponse(acmeCtx, accountByKid, eabData) - - // Per RFC 8555 Section 7.3. Account Management: - // - // > The server returns this account object in a 201 (Created) response, - // > with the account URL in a Location header field. 
- resp.Data[logical.HTTPStatusCode] = http.StatusCreated - return resp, nil -} - -func (b *backend) acmeNewAccountUpdateHandler(acmeCtx *acmeContext, userCtx *jwsCtx, contact []string, status string, eabData map[string]interface{}) (*logical.Response, error) { - if !userCtx.Existing { - return nil, fmt.Errorf("cannot submit to account updates without a 'kid': %w", ErrMalformed) - } - - if len(eabData) != 0 { - return nil, fmt.Errorf("%w: not allowed to update EAB data in accounts", ErrMalformed) - } - - account, err := b.acmeState.LoadAccount(acmeCtx, userCtx.Kid) - if err != nil { - return nil, fmt.Errorf("error loading account: %w", err) - } - - if err = acmeCtx.eabPolicy.EnforceForExistingAccount(account); err != nil { - return nil, err - } - - // Per RFC 8555 7.3.6 Account deactivation, if we were previously deactivated, we should return - // unauthorized. There is no way to reactivate any accounts per ACME RFC. - if account.Status != AccountStatusValid { - // Treating "revoked" and "deactivated" as the same here. - return nil, ErrUnauthorized - } - - shouldUpdate := false - // Check to see if we should update, we don't really care about ordering - if !strutil.EquivalentSlices(account.Contact, contact) { - shouldUpdate = true - account.Contact = contact - } - - // Check to process account de-activation status was requested. - // 7.3.6. Account Deactivation - if string(AccountStatusDeactivated) == status { - shouldUpdate = true - // TODO: This should cancel any ongoing operations (do not revoke certs), - // perhaps we should delete this account here? - account.Status = AccountStatusDeactivated - account.AccountRevokedDate = time.Now() - } - - if shouldUpdate { - err = b.acmeState.UpdateAccount(acmeCtx, account) - if err != nil { - return nil, fmt.Errorf("failed to update account: %w", err) - } - } - - resp := formatAccountResponse(acmeCtx, account) - return resp, nil -} - -func (b *backend) tidyAcmeAccountByThumbprint(as *acmeState, ac *acmeContext, keyThumbprint string, certTidyBuffer, accountTidyBuffer time.Duration) error { - thumbprintEntry, err := ac.sc.Storage.Get(ac.sc.Context, path.Join(acmeThumbprintPrefix, keyThumbprint)) - if err != nil { - return fmt.Errorf("error retrieving thumbprint entry %v, unable to find corresponding account entry: %w", keyThumbprint, err) - } - if thumbprintEntry == nil { - return fmt.Errorf("empty thumbprint entry %v, unable to find corresponding account entry", keyThumbprint) - } - - var thumbprint acmeThumbprint - err = thumbprintEntry.DecodeJSON(&thumbprint) - if err != nil { - return fmt.Errorf("unable to decode thumbprint entry %v to find account entry: %w", keyThumbprint, err) - } - - if len(thumbprint.Kid) == 0 { - return fmt.Errorf("unable to find account entry: empty kid within thumbprint entry: %s", keyThumbprint) - } - - // Now Get the Account: - accountEntry, err := ac.sc.Storage.Get(ac.sc.Context, acmeAccountPrefix+thumbprint.Kid) - if err != nil { - return err - } - if accountEntry == nil { - // We delete the Thumbprint Associated with the Account, and we are done - err = ac.sc.Storage.Delete(ac.sc.Context, path.Join(acmeThumbprintPrefix, keyThumbprint)) - if err != nil { - return err - } - b.tidyStatusIncDeletedAcmeAccountCount() - return nil - } - - var account acmeAccount - err = accountEntry.DecodeJSON(&account) - if err != nil { - return err - } - - // Tidy Orders On the Account - orderIds, err := as.ListOrderIds(ac, thumbprint.Kid) - if err != nil { - return err - } - allOrdersTidied := true - maxCertExpiryUpdated := false - for 
_, orderId := range orderIds { - wasTidied, orderExpiry, err := b.acmeTidyOrder(ac, thumbprint.Kid, getOrderPath(thumbprint.Kid, orderId), certTidyBuffer) - if err != nil { - return err - } - if !wasTidied { - allOrdersTidied = false - } - - if !orderExpiry.IsZero() && account.MaxCertExpiry.Before(orderExpiry) { - account.MaxCertExpiry = orderExpiry - maxCertExpiryUpdated = true - } - } - - now := time.Now() - if allOrdersTidied && - now.After(account.AccountCreatedDate.Add(accountTidyBuffer)) && - now.After(account.MaxCertExpiry.Add(accountTidyBuffer)) { - // Tidy this account - // If it is Revoked or Deactivated: - if (account.Status == AccountStatusRevoked || account.Status == AccountStatusDeactivated) && now.After(account.AccountRevokedDate.Add(accountTidyBuffer)) { - // We Delete the Account Associated with this Thumbprint: - err = ac.sc.Storage.Delete(ac.sc.Context, path.Join(acmeAccountPrefix, thumbprint.Kid)) - if err != nil { - return err - } - - // Now we delete the Thumbprint Associated with the Account: - err = ac.sc.Storage.Delete(ac.sc.Context, path.Join(acmeThumbprintPrefix, keyThumbprint)) - if err != nil { - return err - } - b.tidyStatusIncDeletedAcmeAccountCount() - } else if account.Status == AccountStatusValid { - // Revoke This Account - account.AccountRevokedDate = now - account.Status = AccountStatusRevoked - err := as.UpdateAccount(ac, &account) - if err != nil { - return err - } - b.tidyStatusIncRevAcmeAccountCount() - } - } - - // Only update the account if we modified the max cert expiry values and the account is still valid, - // to prevent us from adding back a deleted account or not re-writing the revoked account that was - // already written above. - if maxCertExpiryUpdated && account.Status == AccountStatusValid { - // Update our expiry time we previously setup. - err := as.UpdateAccount(ac, &account) - if err != nil { - return err - } - } - - return nil -} diff --git a/builtin/logical/pki/path_acme_authorizations.go b/builtin/logical/pki/path_acme_authorizations.go deleted file mode 100644 index 9914491f619ee9..00000000000000 --- a/builtin/logical/pki/path_acme_authorizations.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "fmt" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -func pathAcmeAuthorization(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeAuthorization, "/authorization/"+framework.MatchAllRegex("auth_id")) -} - -func addFieldsForACMEAuthorization(fields map[string]*framework.FieldSchema) map[string]*framework.FieldSchema { - fields["auth_id"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: "ACME authorization identifier value", - Required: true, - } - - return fields -} - -func patternAcmeAuthorization(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - addFieldsForACMEAuthorization(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeAccountRequiredWrapper(b.acmeAuthorizationHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func (b *backend) acmeAuthorizationHandler(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}, _ *acmeAccount) (*logical.Response, error) { - authId := fields.Get("auth_id").(string) - authz, err := b.acmeState.LoadAuthorization(acmeCtx, userCtx, authId) - if err != nil { - return nil, fmt.Errorf("failed to load authorization: %w", err) - } - - var status string - rawStatus, haveStatus := data["status"] - if haveStatus { - var ok bool - status, ok = rawStatus.(string) - if !ok { - return nil, fmt.Errorf("bad type (%T) for value 'status': %w", rawStatus, ErrMalformed) - } - } - - if len(data) == 0 { - return b.acmeAuthorizationFetchHandler(acmeCtx, r, fields, userCtx, data, authz) - } - - if haveStatus && status == "deactivated" { - return b.acmeAuthorizationDeactivateHandler(acmeCtx, r, fields, userCtx, data, authz) - } - - return nil, ErrMalformed -} - -func (b *backend) acmeAuthorizationFetchHandler(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}, authz *ACMEAuthorization) (*logical.Response, error) { - return &logical.Response{ - Data: authz.NetworkMarshal(acmeCtx), - }, nil -} - -func (b *backend) acmeAuthorizationDeactivateHandler(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}, authz *ACMEAuthorization) (*logical.Response, error) { - if authz.Status != ACMEAuthorizationPending && authz.Status != ACMEAuthorizationValid { - return nil, fmt.Errorf("unable to deactivate authorization in '%v' status: %w", authz.Status, ErrMalformed) - } - - authz.Status = ACMEAuthorizationDeactivated - for _, challenge := range authz.Challenges { - challenge.Status = ACMEChallengeInvalid - } - - if err := b.acmeState.SaveAuthorization(acmeCtx, authz); err != nil { - return nil, fmt.Errorf("error saving deactivated authorization: %w", err) - } - - return &logical.Response{ - Data: authz.NetworkMarshal(acmeCtx), - }, nil -} diff --git a/builtin/logical/pki/path_acme_challenges.go b/builtin/logical/pki/path_acme_challenges.go deleted file mode 100644 index cded9d32a3c9cd..00000000000000 --- a/builtin/logical/pki/path_acme_challenges.go +++ 
/dev/null @@ -1,114 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "fmt" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -func pathAcmeChallenge(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeChallenge, - "/challenge/"+framework.MatchAllRegex("auth_id")+"/"+framework.MatchAllRegex("challenge_type")) -} - -func addFieldsForACMEChallenge(fields map[string]*framework.FieldSchema) map[string]*framework.FieldSchema { - fields["auth_id"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: "ACME authorization identifier value", - Required: true, - } - - fields["challenge_type"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: "ACME challenge type", - Required: true, - } - - return fields -} - -func patternAcmeChallenge(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - addFieldsForACMEChallenge(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeAccountRequiredWrapper(b.acmeChallengeHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func (b *backend) acmeChallengeHandler(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}, _ *acmeAccount) (*logical.Response, error) { - authId := fields.Get("auth_id").(string) - challengeType := fields.Get("challenge_type").(string) - - authz, err := b.acmeState.LoadAuthorization(acmeCtx, userCtx, authId) - if err != nil { - return nil, fmt.Errorf("failed to load authorization: %w", err) - } - - return b.acmeChallengeFetchHandler(acmeCtx, r, fields, userCtx, data, authz, challengeType) -} - -func (b *backend) acmeChallengeFetchHandler(acmeCtx *acmeContext, r *logical.Request, fields *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}, authz *ACMEAuthorization, challengeType string) (*logical.Response, error) { - var challenge *ACMEChallenge - for _, c := range authz.Challenges { - if string(c.Type) == challengeType { - challenge = c - break - } - } - - if challenge == nil { - return nil, fmt.Errorf("unknown challenge of type '%v' in authorization: %w", challengeType, ErrMalformed) - } - - // Per RFC 8555 Section 7.5.1. Responding to Challenges: - // - // > The client indicates to the server that it is ready for the challenge - // > validation by sending an empty JSON body ("{}") carried in a POST - // > request to the challenge URL (not the authorization URL). - if len(data) > 0 { - return nil, fmt.Errorf("unexpected request parameters: %w", ErrMalformed) - } - - // If data was nil, we got a POST-as-GET request, just return current challenge without an accept, - // otherwise we most likely got a "{}" payload which we should now accept the challenge. 
- if data != nil { - thumbprint, err := userCtx.GetKeyThumbprint() - if err != nil { - return nil, fmt.Errorf("failed to get thumbprint for key: %w", err) - } - - if err := b.acmeState.validator.AcceptChallenge(acmeCtx.sc, userCtx.Kid, authz, challenge, thumbprint); err != nil { - return nil, fmt.Errorf("error submitting challenge for validation: %w", err) - } - } - - return &logical.Response{ - Data: challenge.NetworkMarshal(acmeCtx, authz.Id), - - // Per RFC 8555 Section 7.1. Resources: - // - // > The "up" link relation is used with challenge resources to indicate - // > the authorization resource to which a challenge belongs. - Headers: map[string][]string{ - "Link": {fmt.Sprintf("<%s>;rel=\"up\"", buildAuthorizationUrl(acmeCtx, authz.Id))}, - }, - }, nil -} diff --git a/builtin/logical/pki/path_acme_directory.go b/builtin/logical/pki/path_acme_directory.go deleted file mode 100644 index e556b35db5bd9a..00000000000000 --- a/builtin/logical/pki/path_acme_directory.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "encoding/json" - "fmt" - "net/http" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -const ( - pathAcmeHelpSync = `An endpoint implementing the standard ACME protocol` - pathAcmeHelpDesc = `This API endpoint implementing a subset of the ACME protocol - defined in RFC 8555, with its own authentication and argument syntax that - does not follow conventional Vault operations. An ACME client tool or library - should be used to interact with these endpoints.` -) - -func pathAcmeDirectory(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeDirectory, "/directory") -} - -func patternAcmeDirectory(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.acmeWrapper(b.acmeDirectoryHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func (b *backend) acmeDirectoryHandler(acmeCtx *acmeContext, r *logical.Request, _ *framework.FieldData) (*logical.Response, error) { - rawBody, err := json.Marshal(map[string]interface{}{ - "newNonce": acmeCtx.baseUrl.JoinPath("new-nonce").String(), - "newAccount": acmeCtx.baseUrl.JoinPath("new-account").String(), - "newOrder": acmeCtx.baseUrl.JoinPath("new-order").String(), - "revokeCert": acmeCtx.baseUrl.JoinPath("revoke-cert").String(), - "keyChange": acmeCtx.baseUrl.JoinPath("key-change").String(), - // This is purposefully missing newAuthz as we don't support pre-authorization - "meta": map[string]interface{}{ - "externalAccountRequired": acmeCtx.eabPolicy.IsExternalAccountRequired(), - }, - }) - if err != nil { - return nil, fmt.Errorf("failed encoding response: %w", err) - } - - return &logical.Response{ - Data: map[string]interface{}{ - logical.HTTPContentType: "application/json", - logical.HTTPStatusCode: http.StatusOK, - logical.HTTPRawBody: rawBody, - }, - }, nil -} diff --git a/builtin/logical/pki/path_acme_eab.go b/builtin/logical/pki/path_acme_eab.go deleted file mode 100644 index d575d6c453d1fe..00000000000000 --- a/builtin/logical/pki/path_acme_eab.go +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 
(c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "crypto/rand" - "encoding/base64" - "fmt" - "path" - "time" - - "github.com/hashicorp/go-uuid" - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -var decodedTokenPrefix = mustBase64Decode("vault-eab-0-") - -func mustBase64Decode(s string) []byte { - bytes, err := base64.RawURLEncoding.DecodeString(s) - if err != nil { - panic(fmt.Sprintf("Token prefix value: %s failed decoding: %v", s, err)) - } - - // Should be dividable by 3 otherwise our prefix will not be properly honored. - if len(bytes)%3 != 0 { - panic(fmt.Sprintf("Token prefix value: %s is not dividable by 3, will not prefix properly", s)) - } - return bytes -} - -/* - * This file unlike the other path_acme_xxx.go are VAULT APIs to manage the - * ACME External Account Bindings, this isn't providing any APIs that an ACME - * client would use. - */ -func pathAcmeEabList(b *backend) *framework.Path { - return &framework.Path{ - Pattern: "eab/?$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - - Fields: map[string]*framework.FieldSchema{}, - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ListOperation: &framework.PathOperation{ - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "list-eab-key", - OperationSuffix: "acme", - }, - Callback: b.pathAcmeListEab, - }, - }, - - HelpSynopsis: "list external account bindings to be used for ACME", - HelpDescription: `list identifiers that have been generated but yet to be used.`, - } -} - -func pathAcmeNewEab(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeNewEab, "/new-eab") -} - -func patternAcmeNewEab(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathAcmeCreateEab, - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "generate-eab-key", - OperationSuffix: "acme", - }, - }, - }, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - - HelpSynopsis: "Generate external account bindings to be used for ACME", - HelpDescription: `Generate single use id/key pairs to be used for ACME EAB.`, - } -} - -func pathAcmeEabDelete(b *backend) *framework.Path { - return &framework.Path{ - Pattern: "eab/" + uuidNameRegex("key_id"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - - Fields: map[string]*framework.FieldSchema{ - "key_id": { - Type: framework.TypeString, - Description: "EAB key identifier", - Required: true, - }, - }, - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.DeleteOperation: &framework.PathOperation{ - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "acme-configuration", - }, - Callback: b.pathAcmeDeleteEab, - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: "Delete an external account binding id prior to its use within an ACME account", - HelpDescription: `Allows an operator to delete an external account binding, -before its bound to a new ACME account. 
If the identifier provided does not exist or -was already consumed by an ACME account a successful response is returned along with -a warning that it did not exist.`, - } -} - -type eabType struct { - KeyID string `json:"-"` - KeyType string `json:"key-type"` - PrivateBytes []byte `json:"private-bytes"` - AcmeDirectory string `json:"acme-directory"` - CreatedOn time.Time `json:"created-on"` -} - -func (b *backend) pathAcmeListEab(ctx context.Context, r *logical.Request, _ *framework.FieldData) (*logical.Response, error) { - sc := b.makeStorageContext(ctx, r.Storage) - - eabIds, err := b.acmeState.ListEabIds(sc) - if err != nil { - return nil, err - } - - var warnings []string - var keyIds []string - keyInfos := map[string]interface{}{} - - for _, eabKey := range eabIds { - eab, err := b.acmeState.LoadEab(sc, eabKey) - if err != nil { - warnings = append(warnings, fmt.Sprintf("failed loading eab entry %s: %v", eabKey, err)) - continue - } - - keyIds = append(keyIds, eab.KeyID) - keyInfos[eab.KeyID] = map[string]interface{}{ - "key_type": eab.KeyType, - "acme_directory": path.Join(eab.AcmeDirectory, "directory"), - "created_on": eab.CreatedOn.Format(time.RFC3339), - } - } - - resp := logical.ListResponseWithInfo(keyIds, keyInfos) - for _, warning := range warnings { - resp.AddWarning(warning) - } - return resp, nil -} - -func (b *backend) pathAcmeCreateEab(ctx context.Context, r *logical.Request, data *framework.FieldData) (*logical.Response, error) { - kid := genUuid() - size := 32 - bytes, err := uuid.GenerateRandomBytesWithReader(size, rand.Reader) - if err != nil { - return nil, fmt.Errorf("failed generating eab key: %w", err) - } - - acmeDirectory, err := getAcmeDirectory(r) - if err != nil { - return nil, err - } - - eab := &eabType{ - KeyID: kid, - KeyType: "hs", - PrivateBytes: append(decodedTokenPrefix, bytes...), // we do this to avoid generating tokens that start with - - AcmeDirectory: acmeDirectory, - CreatedOn: time.Now(), - } - - sc := b.makeStorageContext(ctx, r.Storage) - err = b.acmeState.SaveEab(sc, eab) - if err != nil { - return nil, fmt.Errorf("failed saving generated eab: %w", err) - } - - encodedKey := base64.RawURLEncoding.EncodeToString(eab.PrivateBytes) - - return &logical.Response{ - Data: map[string]interface{}{ - "id": eab.KeyID, - "key_type": eab.KeyType, - "key": encodedKey, - "acme_directory": path.Join(eab.AcmeDirectory, "directory"), - "created_on": eab.CreatedOn.Format(time.RFC3339), - }, - }, nil -} - -func (b *backend) pathAcmeDeleteEab(ctx context.Context, r *logical.Request, d *framework.FieldData) (*logical.Response, error) { - sc := b.makeStorageContext(ctx, r.Storage) - keyId := d.Get("key_id").(string) - - _, err := uuid.ParseUUID(keyId) - if err != nil { - return nil, fmt.Errorf("badly formatted key_id field") - } - - deleted, err := b.acmeState.DeleteEab(sc, keyId) - if err != nil { - return nil, fmt.Errorf("failed deleting key id: %w", err) - } - - resp := &logical.Response{} - if !deleted { - resp.AddWarning("No key id found with id: " + keyId) - } - return resp, nil -} diff --git a/builtin/logical/pki/path_acme_nonce.go b/builtin/logical/pki/path_acme_nonce.go deleted file mode 100644 index e973039a2226fd..00000000000000 --- a/builtin/logical/pki/path_acme_nonce.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "fmt" - "net/http" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -func pathAcmeNonce(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeNonce, "/new-nonce") -} - -func patternAcmeNonce(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.HeaderOperation: &framework.PathOperation{ - Callback: b.acmeWrapper(b.acmeNonceHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.acmeWrapper(b.acmeNonceHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func (b *backend) acmeNonceHandler(ctx *acmeContext, r *logical.Request, _ *framework.FieldData) (*logical.Response, error) { - nonce, _, err := b.acmeState.GetNonce() - if err != nil { - return nil, err - } - - // Header operations return 200, GET return 204. - httpStatus := http.StatusOK - if r.Operation == logical.ReadOperation { - httpStatus = http.StatusNoContent - } - - return &logical.Response{ - Headers: map[string][]string{ - "Cache-Control": {"no-store"}, - "Replay-Nonce": {nonce}, - "Link": genAcmeLinkHeader(ctx), - }, - Data: map[string]interface{}{ - logical.HTTPStatusCode: httpStatus, - // Get around Vault limitation of requiring a body set if the status is not http.StatusNoContent - // for our HEAD request responses. - logical.HTTPContentType: "", - }, - }, nil -} - -func genAcmeLinkHeader(ctx *acmeContext) []string { - path := fmt.Sprintf("<%s>;rel=\"index\"", ctx.baseUrl.JoinPath("directory").String()) - return []string{path} -} diff --git a/builtin/logical/pki/path_acme_order.go b/builtin/logical/pki/path_acme_order.go deleted file mode 100644 index eb2ffaf9e681ef..00000000000000 --- a/builtin/logical/pki/path_acme_order.go +++ /dev/null @@ -1,1084 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "crypto/x509" - "encoding/base64" - "encoding/pem" - "fmt" - "net" - "net/http" - "sort" - "strings" - "time" - - "github.com/hashicorp/vault/sdk/helper/strutil" - - "github.com/hashicorp/vault/sdk/helper/certutil" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" - "golang.org/x/net/idna" -) - -var maxAcmeCertTTL = 90 * (24 * time.Hour) - -func pathAcmeListOrders(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeListOrders, "/orders") -} - -func pathAcmeGetOrder(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeGetOrder, "/order/"+uuidNameRegex("order_id")) -} - -func pathAcmeNewOrder(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeNewOrder, "/new-order") -} - -func pathAcmeFinalizeOrder(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeFinalizeOrder, "/order/"+uuidNameRegex("order_id")+"/finalize") -} - -func pathAcmeFetchOrderCert(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeFetchOrderCert, "/order/"+uuidNameRegex("order_id")+"/cert") -} - -func patternAcmeNewOrder(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeAccountRequiredWrapper(b.acmeNewOrderHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func patternAcmeListOrders(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeAccountRequiredWrapper(b.acmeListOrdersHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func patternAcmeGetOrder(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - addFieldsForACMEOrder(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeAccountRequiredWrapper(b.acmeGetOrderHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func patternAcmeFinalizeOrder(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - addFieldsForACMEOrder(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: 
b.acmeAccountRequiredWrapper(b.acmeFinalizeOrderHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func patternAcmeFetchOrderCert(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - addFieldsForACMEOrder(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeAccountRequiredWrapper(b.acmeFetchCertOrderHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func addFieldsForACMEOrder(fields map[string]*framework.FieldSchema) { - fields["order_id"] = &framework.FieldSchema{ - Type: framework.TypeString, - Description: `The ACME order identifier to fetch`, - Required: true, - } -} - -func (b *backend) acmeFetchCertOrderHandler(ac *acmeContext, _ *logical.Request, fields *framework.FieldData, uc *jwsCtx, data map[string]interface{}, _ *acmeAccount) (*logical.Response, error) { - orderId := fields.Get("order_id").(string) - - order, err := b.acmeState.LoadOrder(ac, uc, orderId) - if err != nil { - return nil, err - } - - if order.Status != ACMEOrderValid { - return nil, fmt.Errorf("%w: order is status %s, needs to be in valid state", ErrOrderNotReady, order.Status) - } - - if len(order.IssuerId) == 0 || len(order.CertificateSerialNumber) == 0 { - return nil, fmt.Errorf("order is missing required fields to load certificate") - } - - certEntry, err := fetchCertBySerial(ac.sc, "certs/", order.CertificateSerialNumber) - if err != nil { - return nil, fmt.Errorf("failed reading certificate %s from storage: %w", order.CertificateSerialNumber, err) - } - if certEntry == nil || len(certEntry.Value) == 0 { - return nil, fmt.Errorf("missing certificate %s from storage", order.CertificateSerialNumber) - } - - cert, err := x509.ParseCertificate(certEntry.Value) - if err != nil { - return nil, fmt.Errorf("failed parsing certificate %s: %w", order.CertificateSerialNumber, err) - } - - issuer, err := ac.sc.fetchIssuerById(order.IssuerId) - if err != nil { - return nil, fmt.Errorf("failed loading certificate issuer %s from storage: %w", order.IssuerId, err) - } - - allPems, err := func() ([]byte, error) { - leafPEM := pem.EncodeToMemory(&pem.Block{ - Type: "CERTIFICATE", - Bytes: cert.Raw, - }) - - chains := []byte(issuer.Certificate) - for _, chainVal := range issuer.CAChain { - if chainVal == issuer.Certificate { - continue - } - chains = append(chains, []byte(chainVal)...) 
- } - - return append(leafPEM, chains...), nil - }() - if err != nil { - return nil, fmt.Errorf("failed encoding certificate ca chain: %w", err) - } - - return &logical.Response{ - Data: map[string]interface{}{ - logical.HTTPContentType: "application/pem-certificate-chain", - logical.HTTPStatusCode: http.StatusOK, - logical.HTTPRawBody: allPems, - }, - }, nil -} - -func (b *backend) acmeFinalizeOrderHandler(ac *acmeContext, _ *logical.Request, fields *framework.FieldData, uc *jwsCtx, data map[string]interface{}, account *acmeAccount) (*logical.Response, error) { - orderId := fields.Get("order_id").(string) - - csr, err := parseCsrFromFinalize(data) - if err != nil { - return nil, err - } - - order, err := b.acmeState.LoadOrder(ac, uc, orderId) - if err != nil { - return nil, err - } - - order.Status, err = computeOrderStatus(ac, uc, order) - if err != nil { - return nil, err - } - - if order.Status != ACMEOrderReady { - return nil, fmt.Errorf("%w: order is status %s, needs to be in ready state", ErrOrderNotReady, order.Status) - } - - now := time.Now() - if !order.Expires.IsZero() && now.After(order.Expires) { - return nil, fmt.Errorf("%w: order %s is expired", ErrMalformed, orderId) - } - - if err = validateCsrMatchesOrder(csr, order); err != nil { - return nil, err - } - - if err = validateCsrNotUsingAccountKey(csr, uc); err != nil { - return nil, err - } - - signedCertBundle, issuerId, err := issueCertFromCsr(ac, csr) - if err != nil { - return nil, err - } - - hyphenSerialNumber := normalizeSerialFromBigInt(signedCertBundle.Certificate.SerialNumber) - err = storeCertificate(ac.sc, signedCertBundle) - if err != nil { - return nil, err - } - - if err := b.acmeState.TrackIssuedCert(ac, order.AccountId, hyphenSerialNumber, order.OrderId); err != nil { - b.Logger().Warn("orphaned generated ACME certificate due to error saving account->cert->order reference", "serial_number", hyphenSerialNumber, "error", err) - return nil, err - } - - order.Status = ACMEOrderValid - order.CertificateSerialNumber = hyphenSerialNumber - order.CertificateExpiry = signedCertBundle.Certificate.NotAfter - order.IssuerId = issuerId - - err = b.acmeState.SaveOrder(ac, order) - if err != nil { - b.Logger().Warn("orphaned generated ACME certificate due to error saving order", "serial_number", hyphenSerialNumber, "error", err) - return nil, fmt.Errorf("failed saving updated order: %w", err) - } - - if err := b.doTrackBilling(ac.sc.Context, order.Identifiers); err != nil { - b.Logger().Error("failed to track billing for order", "order", orderId, "error", err) - err = nil - } - - return formatOrderResponse(ac, order), nil -} - -func computeOrderStatus(ac *acmeContext, uc *jwsCtx, order *acmeOrder) (ACMEOrderStatusType, error) { - // If we reached a final stage, no use computing anything else - if order.Status == ACMEOrderInvalid || order.Status == ACMEOrderValid { - return order.Status, nil - } - - // We aren't in a final state yet, check for expiry - if time.Now().After(order.Expires) { - return ACMEOrderInvalid, nil - } - - // Intermediary steps passed authorizations should short circuit us as well - if order.Status == ACMEOrderReady || order.Status == ACMEOrderProcessing { - return order.Status, nil - } - - // If we have no authorizations attached to the order, nothing to compute either - if len(order.AuthorizationIds) == 0 { - return ACMEOrderPending, nil - } - - anyFailed := false - allPassed := true - for _, authId := range order.AuthorizationIds { - authorization, err := ac.getAcmeState().LoadAuthorization(ac, uc, 
authId) - if err != nil { - return order.Status, fmt.Errorf("failed loading authorization: %s: %w", authId, err) - } - - if authorization.Status == ACMEAuthorizationPending { - allPassed = false - continue - } - - if authorization.Status != ACMEAuthorizationValid { - // Per RFC 8555 - 7.1.6. Status Changes - // The order also moves to the "invalid" state if it expires or - // one of its authorizations enters a final state other than - // "valid" ("expired", "revoked", or "deactivated"). - allPassed = false - anyFailed = true - break - } - } - - if anyFailed { - return ACMEOrderInvalid, nil - } - - if allPassed { - return ACMEOrderReady, nil - } - - // The order has not expired, no authorizations have yet to be marked as failed - // nor have we passed them all. - return ACMEOrderPending, nil -} - -func validateCsrNotUsingAccountKey(csr *x509.CertificateRequest, uc *jwsCtx) error { - csrKey := csr.PublicKey - userKey := uc.Key.Public().Key - - sameKey, err := certutil.ComparePublicKeysAndType(csrKey, userKey) - if err != nil { - return err - } - - if sameKey { - return fmt.Errorf("%w: certificate public key must not match account key", ErrBadCSR) - } - - return nil -} - -func validateCsrMatchesOrder(csr *x509.CertificateRequest, order *acmeOrder) error { - csrDNSIdentifiers, csrIPIdentifiers := getIdentifiersFromCSR(csr) - orderDNSIdentifiers := strutil.RemoveDuplicates(order.getIdentifierDNSValues(), true) - orderIPIdentifiers := removeDuplicatesAndSortIps(order.getIdentifierIPValues()) - - if len(orderDNSIdentifiers) == 0 && len(orderIPIdentifiers) == 0 { - return fmt.Errorf("%w: order did not include any identifiers", ErrServerInternal) - } - - if len(orderDNSIdentifiers) != len(csrDNSIdentifiers) { - return fmt.Errorf("%w: Order (%v) and CSR (%v) mismatch on number of DNS identifiers", ErrBadCSR, len(orderDNSIdentifiers), len(csrDNSIdentifiers)) - } - - if len(orderIPIdentifiers) != len(csrIPIdentifiers) { - return fmt.Errorf("%w: Order (%v) and CSR (%v) mismatch on number of IP identifiers", ErrBadCSR, len(orderIPIdentifiers), len(csrIPIdentifiers)) - } - - for i, identifier := range orderDNSIdentifiers { - if identifier != csrDNSIdentifiers[i] { - return fmt.Errorf("%w: CSR is missing order DNS identifier %s", ErrBadCSR, identifier) - } - } - - for i, identifier := range orderIPIdentifiers { - if !identifier.Equal(csrIPIdentifiers[i]) { - return fmt.Errorf("%w: CSR is missing order IP identifier %s", ErrBadCSR, identifier.String()) - } - } - - // Since we do not support NotBefore/NotAfter dates at this time no need to validate CSR/Order match. 
- - return nil -} - -func (b *backend) validateIdentifiersAgainstRole(role *roleEntry, identifiers []*ACMEIdentifier) error { - for _, identifier := range identifiers { - switch identifier.Type { - case ACMEDNSIdentifier: - data := &inputBundle{ - role: role, - req: &logical.Request{}, - apiData: &framework.FieldData{}, - } - - if validateNames(b, data, []string{identifier.OriginalValue}) != "" { - return fmt.Errorf("%w: role (%s) will not issue certificate for name %v", - ErrRejectedIdentifier, role.Name, identifier.OriginalValue) - } - case ACMEIPIdentifier: - if !role.AllowIPSANs { - return fmt.Errorf("%w: role (%s) does not allow IP sans, so cannot issue certificate for %v", - ErrRejectedIdentifier, role.Name, identifier.OriginalValue) - } - default: - return fmt.Errorf("unknown type of identifier: %v for %v", identifier.Type, identifier.OriginalValue) - } - } - - return nil -} - -func getIdentifiersFromCSR(csr *x509.CertificateRequest) ([]string, []net.IP) { - dnsIdentifiers := append([]string(nil), csr.DNSNames...) - ipIdentifiers := append([]net.IP(nil), csr.IPAddresses...) - - if csr.Subject.CommonName != "" { - ip := net.ParseIP(csr.Subject.CommonName) - if ip != nil { - ipIdentifiers = append(ipIdentifiers, ip) - } else { - dnsIdentifiers = append(dnsIdentifiers, csr.Subject.CommonName) - } - } - - return strutil.RemoveDuplicates(dnsIdentifiers, true), removeDuplicatesAndSortIps(ipIdentifiers) -} - -func removeDuplicatesAndSortIps(ipIdentifiers []net.IP) []net.IP { - var uniqueIpIdentifiers []net.IP - for _, ip := range ipIdentifiers { - found := false - for _, curIp := range uniqueIpIdentifiers { - if curIp.Equal(ip) { - found = true - } - } - - if !found { - uniqueIpIdentifiers = append(uniqueIpIdentifiers, ip) - } - } - - sort.Slice(uniqueIpIdentifiers, func(i, j int) bool { - return uniqueIpIdentifiers[i].String() < uniqueIpIdentifiers[j].String() - }) - return uniqueIpIdentifiers -} - -func storeCertificate(sc *storageContext, signedCertBundle *certutil.ParsedCertBundle) error { - hyphenSerialNumber := normalizeSerialFromBigInt(signedCertBundle.Certificate.SerialNumber) - key := "certs/" + hyphenSerialNumber - certsCounted := sc.Backend.certsCounted.Load() - err := sc.Storage.Put(sc.Context, &logical.StorageEntry{ - Key: key, - Value: signedCertBundle.CertificateBytes, - }) - if err != nil { - return fmt.Errorf("unable to store certificate locally: %w", err) - } - sc.Backend.ifCountEnabledIncrementTotalCertificatesCount(certsCounted, key) - return nil -} - -func maybeAugmentReqDataWithSuitableCN(ac *acmeContext, csr *x509.CertificateRequest, data *framework.FieldData) { - // Role doesn't require a CN, so we don't care. - if !ac.role.RequireCN { - return - } - - // CSR contains a CN, so use that one. - if csr.Subject.CommonName != "" { - return - } - - // Choose a CN in the order wildcard -> DNS -> IP -> fail. 
- for _, name := range csr.DNSNames { - if strings.Contains(name, "*") { - data.Raw["common_name"] = name - return - } - } - if len(csr.DNSNames) > 0 { - data.Raw["common_name"] = csr.DNSNames[0] - return - } - if len(csr.IPAddresses) > 0 { - data.Raw["common_name"] = csr.IPAddresses[0].String() - return - } -} - -func issueCertFromCsr(ac *acmeContext, csr *x509.CertificateRequest) (*certutil.ParsedCertBundle, issuerID, error) { - pemBlock := &pem.Block{ - Type: "CERTIFICATE REQUEST", - Headers: nil, - Bytes: csr.Raw, - } - pemCsr := string(pem.EncodeToMemory(pemBlock)) - - data := &framework.FieldData{ - Raw: map[string]interface{}{ - "csr": pemCsr, - }, - Schema: getCsrSignVerbatimSchemaFields(), - } - - // XXX: Usability hack: by default, minimalist roles have require_cn=true, - // but some ACME clients do not provision one in the certificate as modern - // (TLS) clients are mostly verifying against server's DNS SANs. - maybeAugmentReqDataWithSuitableCN(ac, csr, data) - - signingBundle, issuerId, err := ac.sc.fetchCAInfoWithIssuer(ac.issuer.ID.String(), IssuanceUsage) - if err != nil { - return nil, "", fmt.Errorf("failed loading CA %s: %w", ac.issuer.ID.String(), err) - } - - // ACME issued cert will override the TTL values to truncate to the issuer's - // expiration if we go beyond, no matter the setting - if signingBundle.LeafNotAfterBehavior == certutil.ErrNotAfterBehavior { - signingBundle.LeafNotAfterBehavior = certutil.TruncateNotAfterBehavior - } - - input := &inputBundle{ - req: &logical.Request{}, - apiData: data, - role: ac.role, - } - - normalNotAfter, _, err := getCertificateNotAfter(ac.sc.Backend, input, signingBundle) - if err != nil { - return nil, "", fmt.Errorf("failed computing certificate TTL from role/mount: %v: %w", err, ErrMalformed) - } - - // Force a maximum 90 day TTL or lower for ACME - if time.Now().Add(maxAcmeCertTTL).Before(normalNotAfter) { - input.apiData.Raw["ttl"] = maxAcmeCertTTL - } - - if csr.PublicKeyAlgorithm == x509.UnknownPublicKeyAlgorithm || csr.PublicKey == nil { - return nil, "", fmt.Errorf("%w: Refusing to sign CSR with empty PublicKey", ErrBadCSR) - } - - // UseCSRValues as defined in certutil/helpers.go accepts the following - // fields off of the CSR: - // - // 1. Subject fields, - // 2. SANs, - // 3. Extensions (except for a BasicConstraint extension) - // - // Because we have stricter validation of subject parameters, and no way - // to validate or allow extensions, we do not wish to use the CSR's - // parameters for these values. If a CSR sets, e.g., an organizational - // unit, we have no way of validating this (via ACME here, without perhaps - // an external policy engine), and thus should not be setting it on our - // final issued certificate. - parsedBundle, _, err := signCert(ac.sc.Backend, input, signingBundle, false /* is_ca=false */, false /* use_csr_values */) - if err != nil { - return nil, "", fmt.Errorf("%w: refusing to sign CSR: %s", ErrBadCSR, err.Error()) - } - - if err = parsedBundle.Verify(); err != nil { - return nil, "", fmt.Errorf("verification of parsed bundle failed: %w", err) - } - - // We only allow ServerAuth key usage from ACME issued certs. 
- for _, usage := range parsedBundle.Certificate.ExtKeyUsage { - if usage != x509.ExtKeyUsageServerAuth { - return nil, "", fmt.Errorf("%w: ACME certs only allow ServerAuth key usage", ErrBadCSR) - } - } - - return parsedBundle, issuerId, err -} - -func parseCsrFromFinalize(data map[string]interface{}) (*x509.CertificateRequest, error) { - csrInterface, present := data["csr"] - if !present { - return nil, fmt.Errorf("%w: missing csr in payload", ErrMalformed) - } - - base64Csr, ok := csrInterface.(string) - if !ok { - return nil, fmt.Errorf("%w: csr in payload not the expected type: %T", ErrMalformed, csrInterface) - } - - derCsr, err := base64.RawURLEncoding.DecodeString(base64Csr) - if err != nil { - return nil, fmt.Errorf("%w: failed base64 decoding csr: %s", ErrMalformed, err.Error()) - } - - csr, err := x509.ParseCertificateRequest(derCsr) - if err != nil { - return nil, fmt.Errorf("%w: failed to parse csr: %s", ErrMalformed, err.Error()) - } - - if csr.PublicKey == nil || csr.PublicKeyAlgorithm == x509.UnknownPublicKeyAlgorithm { - return nil, fmt.Errorf("%w: failed to parse csr no public key info or unknown key algorithm used", ErrBadCSR) - } - - for _, ext := range csr.Extensions { - if ext.Id.Equal(certutil.ExtensionBasicConstraintsOID) { - isCa, _, err := certutil.ParseBasicConstraintExtension(ext) - if err != nil { - return nil, fmt.Errorf("%w: refusing to accept CSR with Basic Constraints extension: %v", ErrBadCSR, err.Error()) - } - - if isCa { - return nil, fmt.Errorf("%w: refusing to accept CSR with Basic Constraints extension with CA set to true", ErrBadCSR) - } - } - } - - return csr, nil -} - -func (b *backend) acmeGetOrderHandler(ac *acmeContext, _ *logical.Request, fields *framework.FieldData, uc *jwsCtx, _ map[string]interface{}, _ *acmeAccount) (*logical.Response, error) { - orderId := fields.Get("order_id").(string) - - order, err := b.acmeState.LoadOrder(ac, uc, orderId) - if err != nil { - return nil, err - } - - order.Status, err = computeOrderStatus(ac, uc, order) - if err != nil { - return nil, err - } - - // Per RFC 8555 -> 7.1.3. Order Objects - // For final orders (in the "valid" or "invalid" state), the authorizations that were completed. - // - // Otherwise, for "pending" orders we will return our list as it was originally saved. 
- requiresFiltering := order.Status == ACMEOrderValid || order.Status == ACMEOrderInvalid - if requiresFiltering { - filteredAuthorizationIds := []string{} - - for _, authId := range order.AuthorizationIds { - authorization, err := b.acmeState.LoadAuthorization(ac, uc, authId) - if err != nil { - return nil, err - } - - if (order.Status == ACMEOrderInvalid || order.Status == ACMEOrderValid) && - authorization.Status == ACMEAuthorizationValid { - filteredAuthorizationIds = append(filteredAuthorizationIds, authId) - } - } - - order.AuthorizationIds = filteredAuthorizationIds - } - - return formatOrderResponse(ac, order), nil -} - -func (b *backend) acmeListOrdersHandler(ac *acmeContext, _ *logical.Request, _ *framework.FieldData, uc *jwsCtx, _ map[string]interface{}, acct *acmeAccount) (*logical.Response, error) { - orderIds, err := b.acmeState.ListOrderIds(ac, acct.KeyId) - if err != nil { - return nil, err - } - - orderUrls := []string{} - for _, orderId := range orderIds { - order, err := b.acmeState.LoadOrder(ac, uc, orderId) - if err != nil { - return nil, err - } - - if order.Status == ACMEOrderInvalid { - // Per RFC8555 -> 7.1.2.1 - Orders List - // The server SHOULD include pending orders and SHOULD NOT - // include orders that are invalid in the array of URLs. - continue - } - - orderUrls = append(orderUrls, buildOrderUrl(ac, orderId)) - } - - resp := &logical.Response{ - Data: map[string]interface{}{ - "orders": orderUrls, - }, - } - - return resp, nil -} - -func (b *backend) acmeNewOrderHandler(ac *acmeContext, _ *logical.Request, _ *framework.FieldData, _ *jwsCtx, data map[string]interface{}, account *acmeAccount) (*logical.Response, error) { - identifiers, err := parseOrderIdentifiers(data) - if err != nil { - return nil, err - } - - notBefore, err := parseOptRFC3339Field(data, "notBefore") - if err != nil { - return nil, err - } - - notAfter, err := parseOptRFC3339Field(data, "notAfter") - if err != nil { - return nil, err - } - - if !notBefore.IsZero() || !notAfter.IsZero() { - return nil, fmt.Errorf("%w: NotBefore and NotAfter are not supported", ErrMalformed) - } - - err = validateAcmeProvidedOrderDates(notBefore, notAfter) - if err != nil { - return nil, err - } - - err = b.validateIdentifiersAgainstRole(ac.role, identifiers) - if err != nil { - return nil, err - } - - // Per RFC 8555 -> 7.1.3. Order Objects - // For pending orders, the authorizations that the client needs to complete before the - // requested certificate can be issued (see Section 7.5), including - // unexpired authorizations that the client has completed in the past - // for identifiers specified in the order. - // - // Since we are generating all authorizations here, there is no need to filter them out - // IF/WHEN we support pre-authz workflows and associate existing authorizations to this - // order they will need filtering. 
- var authorizations []*ACMEAuthorization - var authorizationIds []string - for _, identifier := range identifiers { - authz, err := generateAuthorization(account, identifier) - if err != nil { - return nil, fmt.Errorf("error generating authorizations: %w", err) - } - authorizations = append(authorizations, authz) - - err = b.acmeState.SaveAuthorization(ac, authz) - if err != nil { - return nil, fmt.Errorf("failed storing authorization: %w", err) - } - - authorizationIds = append(authorizationIds, authz.Id) - } - - order := &acmeOrder{ - OrderId: genUuid(), - AccountId: account.KeyId, - Status: ACMEOrderPending, - Expires: time.Now().Add(24 * time.Hour), // TODO: Readjust this based on authz and/or config - Identifiers: identifiers, - AuthorizationIds: authorizationIds, - } - - err = b.acmeState.SaveOrder(ac, order) - if err != nil { - return nil, fmt.Errorf("failed storing order: %w", err) - } - - resp := formatOrderResponse(ac, order) - - // Per RFC 8555 Section 7.4. Applying for Certificate Issuance: - // - // > If the server is willing to issue the requested certificate, it - // > responds with a 201 (Created) response. - resp.Data[logical.HTTPStatusCode] = http.StatusCreated - return resp, nil -} - -func validateAcmeProvidedOrderDates(notBefore time.Time, notAfter time.Time) error { - if !notBefore.IsZero() && !notAfter.IsZero() { - if notBefore.Equal(notAfter) { - return fmt.Errorf("%w: provided notBefore and notAfter dates can not be equal", ErrMalformed) - } - - if notBefore.After(notAfter) { - return fmt.Errorf("%w: provided notBefore can not be greater than notAfter", ErrMalformed) - } - } - - if !notAfter.IsZero() { - if time.Now().After(notAfter) { - return fmt.Errorf("%w: provided notAfter can not be in the past", ErrMalformed) - } - } - - return nil -} - -func formatOrderResponse(acmeCtx *acmeContext, order *acmeOrder) *logical.Response { - baseOrderUrl := buildOrderUrl(acmeCtx, order.OrderId) - - var authorizationUrls []string - for _, authId := range order.AuthorizationIds { - authorizationUrls = append(authorizationUrls, buildAuthorizationUrl(acmeCtx, authId)) - } - - var identifiers []map[string]interface{} - for _, identifier := range order.Identifiers { - identifiers = append(identifiers, identifier.NetworkMarshal( /* use original value */ true)) - } - - resp := &logical.Response{ - Data: map[string]interface{}{ - "status": order.Status, - "expires": order.Expires.Format(time.RFC3339), - "identifiers": identifiers, - "authorizations": authorizationUrls, - "finalize": baseOrderUrl + "/finalize", - }, - Headers: map[string][]string{ - "Location": {baseOrderUrl}, - }, - } - - // Only reply with the certificate URL if we are in a valid order state. - if order.Status == ACMEOrderValid { - resp.Data["certificate"] = baseOrderUrl + "/cert" - } - - return resp -} - -func buildAuthorizationUrl(acmeCtx *acmeContext, authId string) string { - return acmeCtx.baseUrl.JoinPath("authorization", authId).String() -} - -func buildOrderUrl(acmeCtx *acmeContext, orderId string) string { - return acmeCtx.baseUrl.JoinPath("order", orderId).String() -} - -func generateAuthorization(acct *acmeAccount, identifier *ACMEIdentifier) (*ACMEAuthorization, error) { - authId := genUuid() - - // Certain challenges have certain restrictions: DNS challenges cannot - // be used to validate IP addresses, and only DNS challenges can be used - // to validate wildcards. 
- allowedChallenges := []ACMEChallengeType{ACMEHTTPChallenge, ACMEDNSChallenge, ACMEALPNChallenge} - if identifier.Type == ACMEIPIdentifier { - allowedChallenges = []ACMEChallengeType{ACMEHTTPChallenge} - } else if identifier.IsWildcard { - allowedChallenges = []ACMEChallengeType{ACMEDNSChallenge} - } - - var challenges []*ACMEChallenge - for _, challengeType := range allowedChallenges { - token, err := getACMEToken() - if err != nil { - return nil, err - } - - challenge := &ACMEChallenge{ - Type: challengeType, - Status: ACMEChallengePending, - ChallengeFields: map[string]interface{}{ - "token": token, - }, - } - - challenges = append(challenges, challenge) - } - - return &ACMEAuthorization{ - Id: authId, - AccountId: acct.KeyId, - Identifier: identifier, - Status: ACMEAuthorizationPending, - Expires: "", // only populated when it switches to valid. - Challenges: challenges, - Wildcard: identifier.IsWildcard, - }, nil -} - -func parseOptRFC3339Field(data map[string]interface{}, keyName string) (time.Time, error) { - var timeVal time.Time - var err error - - rawBefore, present := data[keyName] - if present { - beforeStr, ok := rawBefore.(string) - if !ok { - return timeVal, fmt.Errorf("invalid type (%T) for field '%s': %w", rawBefore, keyName, ErrMalformed) - } - timeVal, err = time.Parse(time.RFC3339, beforeStr) - if err != nil { - return timeVal, fmt.Errorf("failed parsing field '%s' (%s): %s: %w", keyName, rawBefore, err.Error(), ErrMalformed) - } - - if timeVal.IsZero() { - return timeVal, fmt.Errorf("provided time value is invalid '%s' (%s): %w", keyName, rawBefore, ErrMalformed) - } - } - - return timeVal, nil -} - -func parseOrderIdentifiers(data map[string]interface{}) ([]*ACMEIdentifier, error) { - rawIdentifiers, present := data["identifiers"] - if !present { - return nil, fmt.Errorf("missing required identifiers argument: %w", ErrMalformed) - } - - listIdentifiers, ok := rawIdentifiers.([]interface{}) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for field 'identifiers': %w", rawIdentifiers, ErrMalformed) - } - - var identifiers []*ACMEIdentifier - for _, rawIdentifier := range listIdentifiers { - mapIdentifier, ok := rawIdentifier.(map[string]interface{}) - if !ok { - return nil, fmt.Errorf("invalid type (%T) for value in 'identifiers': %w", rawIdentifier, ErrMalformed) - } - - typeVal, present := mapIdentifier["type"] - if !present { - return nil, fmt.Errorf("missing type argument for value in 'identifiers': %w", ErrMalformed) - } - typeStr, ok := typeVal.(string) - if !ok { - return nil, fmt.Errorf("invalid type for type argument (%T) for value in 'identifiers': %w", typeStr, ErrMalformed) - } - - valueVal, present := mapIdentifier["value"] - if !present { - return nil, fmt.Errorf("missing value argument for value in 'identifiers': %w", ErrMalformed) - } - valueStr, ok := valueVal.(string) - if !ok { - return nil, fmt.Errorf("invalid type for value argument (%T) for value in 'identifiers': %w", valueStr, ErrMalformed) - } - - if len(valueStr) == 0 { - return nil, fmt.Errorf("value argument for value in 'identifiers' can not be blank: %w", ErrMalformed) - } - - identifier := &ACMEIdentifier{ - Value: valueStr, - OriginalValue: valueStr, - } - - switch typeStr { - case string(ACMEIPIdentifier): - identifier.Type = ACMEIPIdentifier - ip := net.ParseIP(valueStr) - if ip == nil { - return nil, fmt.Errorf("value argument (%s) failed validation: failed parsing as IP: %w", valueStr, ErrMalformed) - } - case string(ACMEDNSIdentifier): - identifier.Type = ACMEDNSIdentifier - - 
// This check modifies the identifier if it is a wildcard, - // removing the non-wildcard portion. We do this before the - // IP address checks, in case of an attempt to bypass the IP/DNS - // check via including a leading wildcard (e.g., *.127.0.0.1). - // - // Per RFC 8555 Section 7.1.4. Authorization Objects: - // - // > Wildcard domain names (with "*" as the first label) MUST NOT - // > be included in authorization objects. - if _, _, err := identifier.MaybeParseWildcard(); err != nil { - return nil, fmt.Errorf("value argument (%s) failed validation: invalid wildcard: %v: %w", valueStr, err, ErrMalformed) - } - - if isIP := net.ParseIP(identifier.Value); isIP != nil { - return nil, fmt.Errorf("refusing to accept argument (%s) as DNS type identifier: parsed OK as IP address: %w", valueStr, ErrMalformed) - } - - // Use the reduced (identifier.Value) in case this was a wildcard - // domain. - p := idna.New(idna.ValidateForRegistration()) - converted, err := p.ToASCII(identifier.Value) - if err != nil { - return nil, fmt.Errorf("value argument (%s) failed validation: %s: %w", valueStr, err.Error(), ErrMalformed) - } - - // Per RFC 8555 Section 7.1.4. Authorization Objects: - // - // > The domain name MUST be encoded in the form in which it - // > would appear in a certificate. That is, it MUST be encoded - // > according to the rules in Section 7 of [RFC5280]. Servers - // > MUST verify any identifier values that begin with the - // > ASCII-Compatible Encoding prefix "xn--" as defined in - // > [RFC5890] are properly encoded. - if identifier.Value != converted { - return nil, fmt.Errorf("value argument (%s) failed IDNA round-tripping to ASCII: %w", valueStr, ErrMalformed) - } - default: - return nil, fmt.Errorf("unsupported identifier type %s: %w", typeStr, ErrUnsupportedIdentifier) - } - - identifiers = append(identifiers, identifier) - } - - return identifiers, nil -} - -func (b *backend) acmeTidyOrder(ac *acmeContext, accountId string, orderPath string, certTidyBuffer time.Duration) (bool, time.Time, error) { - // First we get the order; note that the orderPath includes the account - // It's only accessed at acme/orders/ with the account context - // It's saved at acme//orders/ - entry, err := ac.sc.Storage.Get(ac.sc.Context, orderPath) - if err != nil { - return false, time.Time{}, fmt.Errorf("error loading order: %w", err) - } - if entry == nil { - return false, time.Time{}, fmt.Errorf("order does not exist: %w", ErrMalformed) - } - var order acmeOrder - err = entry.DecodeJSON(&order) - if err != nil { - return false, time.Time{}, fmt.Errorf("error decoding order: %w", err) - } - - // Determine whether we should tidy this order - shouldTidy := false - - // Track either the order expiry or certificate expiry to return to the caller, this - // can be used to influence the account's expiry - orderExpiry := order.CertificateExpiry - - // It is faster to check certificate information on the order entry rather than fetch the cert entry to parse: - if !order.CertificateExpiry.IsZero() { - // This implies that a certificate exists - // When a certificate exists, we want to expire and tidy the order when we tidy the certificate: - if time.Now().After(order.CertificateExpiry.Add(certTidyBuffer)) { // It's time to clean - shouldTidy = true - } - } else { - // This implies that no certificate exists - // In this case, we want to expire the order after it has expired (+ some safety buffer) - if time.Now().After(order.Expires) { - shouldTidy = true - } - orderExpiry = order.Expires - } - if 
shouldTidy == false { - return shouldTidy, orderExpiry, nil - } - - // Tidy this Order - // That includes any certificate acme//orders/orderPath/cert - // That also includes any related authorizations: acme//authorizations/ - - // First Authorizations - for _, authorizationId := range order.AuthorizationIds { - err = ac.sc.Storage.Delete(ac.sc.Context, getAuthorizationPath(accountId, authorizationId)) - if err != nil { - return false, orderExpiry, err - } - } - - // Normal Tidy will Take Care of the Certificate, we need to clean up the certificate to account tracker though - err = ac.sc.Storage.Delete(ac.sc.Context, getAcmeSerialToAccountTrackerPath(accountId, order.CertificateSerialNumber)) - if err != nil { - return false, orderExpiry, err - } - - // And Finally, the order: - err = ac.sc.Storage.Delete(ac.sc.Context, orderPath) - if err != nil { - return false, orderExpiry, err - } - b.tidyStatusIncDelAcmeOrderCount() - - return true, orderExpiry, nil -} diff --git a/builtin/logical/pki/path_acme_order_test.go b/builtin/logical/pki/path_acme_order_test.go deleted file mode 100644 index 5340bbd31caca8..00000000000000 --- a/builtin/logical/pki/path_acme_order_test.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "net" - "testing" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" - "github.com/stretchr/testify/require" -) - -// TestACME_ValidateIdentifiersAgainstRole Verify the ACME order creation -// function verifies somewhat the identifiers that were provided have a -// decent chance of being allowed by the selected role. -func TestACME_ValidateIdentifiersAgainstRole(t *testing.T) { - b, _ := CreateBackendWithStorage(t) - - tests := []struct { - name string - role *roleEntry - identifiers []*ACMEIdentifier - expectErr bool - }{ - { - name: "verbatim-role-allows-dns-ip", - role: buildSignVerbatimRoleWithNoData(nil), - identifiers: _buildACMEIdentifiers("test.com", "127.0.0.1"), - expectErr: false, - }, - { - name: "default-role-does-not-allow-dns", - role: buildTestRole(t, nil), - identifiers: _buildACMEIdentifiers("www.test.com"), - expectErr: true, - }, - { - name: "default-role-allows-ip", - role: buildTestRole(t, nil), - identifiers: _buildACMEIdentifiers("192.168.0.1"), - expectErr: false, - }, - { - name: "disable-ip-sans-forbids-ip", - role: buildTestRole(t, map[string]interface{}{"allow_ip_sans": false}), - identifiers: _buildACMEIdentifiers("192.168.0.1"), - expectErr: true, - }, - { - name: "role-no-wildcards-allowed-without", - role: buildTestRole(t, map[string]interface{}{ - "allow_subdomains": true, - "allow_bare_domains": true, - "allowed_domains": []string{"test.com"}, - "allow_wildcard_certificates": false, - }), - identifiers: _buildACMEIdentifiers("www.test.com", "test.com"), - expectErr: false, - }, - { - name: "role-no-wildcards-allowed-with-wildcard", - role: buildTestRole(t, map[string]interface{}{ - "allow_subdomains": true, - "allowed_domains": []string{"test.com"}, - "allow_wildcard_certificates": false, - }), - identifiers: _buildACMEIdentifiers("*.test.com"), - expectErr: true, - }, - { - name: "role-wildcards-allowed-with-wildcard", - role: buildTestRole(t, map[string]interface{}{ - "allow_subdomains": true, - "allowed_domains": []string{"test.com"}, - "allow_wildcard_certificates": true, - }), - identifiers: _buildACMEIdentifiers("*.test.com"), - expectErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) 
{ - err := b.validateIdentifiersAgainstRole(tt.role, tt.identifiers) - if tt.expectErr { - require.Error(t, err, "validateIdentifiersAgainstRole(%v, %v)", tt.role.ToResponseData(), tt.identifiers) - // If we did return an error it should be classified as an ErrRejectedIdentifier - require.ErrorIs(t, err, ErrRejectedIdentifier) - } else { - require.NoError(t, err, "validateIdentifiersAgainstRole(%v, %v)", tt.role.ToResponseData(), tt.identifiers) - } - }) - } -} - -func _buildACMEIdentifiers(values ...string) []*ACMEIdentifier { - var identifiers []*ACMEIdentifier - - for _, value := range values { - identifiers = append(identifiers, _buildACMEIdentifier(value)) - } - - return identifiers -} - -func _buildACMEIdentifier(val string) *ACMEIdentifier { - ip := net.ParseIP(val) - if ip == nil { - identifier := &ACMEIdentifier{Type: "dns", Value: val, OriginalValue: val, IsWildcard: false} - _, _, _ = identifier.MaybeParseWildcard() - return identifier - } - - return &ACMEIdentifier{Type: "ip", Value: val, OriginalValue: val, IsWildcard: false} -} - -// Easily allow tests to create valid roles with proper defaults, since we don't have an easy -// way to generate roles with proper defaults, go through the createRole handler with the handlers -// field data so we pick up all the defaults specified there. -func buildTestRole(t *testing.T, config map[string]interface{}) *roleEntry { - b, s := CreateBackendWithStorage(t) - - path := pathRoles(b) - fields := path.Fields - if config == nil { - config = map[string]interface{}{} - } - - if _, exists := config["name"]; !exists { - config["name"] = genUuid() - } - - _, err := b.pathRoleCreate(ctx, &logical.Request{Storage: s}, &framework.FieldData{Raw: config, Schema: fields}) - require.NoError(t, err, "failed generating role with config %v", config) - - role, err := b.getRole(ctx, s, config["name"].(string)) - require.NoError(t, err, "failed loading stored role") - - return role -} diff --git a/builtin/logical/pki/path_acme_revoke.go b/builtin/logical/pki/path_acme_revoke.go deleted file mode 100644 index 9a71f1cd911003..00000000000000 --- a/builtin/logical/pki/path_acme_revoke.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "bytes" - "crypto" - "crypto/x509" - "encoding/base64" - "fmt" - "time" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/logical" -) - -func pathAcmeRevoke(b *backend) []*framework.Path { - return buildAcmeFrameworkPaths(b, patternAcmeRevoke, "/revoke-cert") -} - -func patternAcmeRevoke(b *backend, pattern string) *framework.Path { - fields := map[string]*framework.FieldSchema{} - addFieldsForACMEPath(fields, pattern) - addFieldsForACMERequest(fields) - - return &framework.Path{ - Pattern: pattern, - Fields: fields, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.acmeParsedWrapper(b.acmeRevocationHandler), - ForwardPerformanceSecondary: false, - ForwardPerformanceStandby: true, - }, - }, - - HelpSynopsis: pathAcmeHelpSync, - HelpDescription: pathAcmeHelpDesc, - } -} - -func (b *backend) acmeRevocationHandler(acmeCtx *acmeContext, _ *logical.Request, _ *framework.FieldData, userCtx *jwsCtx, data map[string]interface{}) (*logical.Response, error) { - var cert *x509.Certificate - - rawCertificate, present := data["certificate"] - if present { - certBase64, ok := rawCertificate.(string) - if !ok { - return nil, fmt.Errorf("invalid type (%T; expected string) for field 'certificate': %w", rawCertificate, ErrMalformed) - } - - certBytes, err := base64.RawURLEncoding.DecodeString(certBase64) - if err != nil { - return nil, fmt.Errorf("failed to base64 decode certificate: %v: %w", err, ErrMalformed) - } - - cert, err = x509.ParseCertificate(certBytes) - if err != nil { - return nil, fmt.Errorf("failed to parse certificate: %v: %w", err, ErrMalformed) - } - } else { - return nil, fmt.Errorf("bad request was lacking required field 'certificate': %w", ErrMalformed) - } - - rawReason, present := data["reason"] - if present { - reason, ok := rawReason.(float64) - if !ok { - return nil, fmt.Errorf("invalid type (%T; expected float64) for field 'reason': %w", rawReason, ErrMalformed) - } - - if int(reason) != 0 { - return nil, fmt.Errorf("Vault does not support revocation reasons (got %v; expected omitted or 0/unspecified): %w", int(reason), ErrBadRevocationReason) - } - } - - // If the certificate expired, there's no point in revoking it. - if cert.NotAfter.Before(time.Now()) { - return nil, fmt.Errorf("refusing to revoke expired certificate: %w", ErrMalformed) - } - - // Fetch the CRL config as we need it to ultimately do the - // revocation. This should be cached and thus relatively fast. - config, err := b.crlBuilder.getConfigWithUpdate(acmeCtx.sc) - if err != nil { - return nil, fmt.Errorf("unable to revoke certificate: failed reading revocation config: %v: %w", err, ErrServerInternal) - } - - // Load our certificate from storage to ensure it exists and matches - // what was given to us. - serial := serialFromCert(cert) - certEntry, err := fetchCertBySerial(acmeCtx.sc, "certs/", serial) - if err != nil { - return nil, fmt.Errorf("unable to revoke certificate: err reading global cert entry: %v: %w", err, ErrServerInternal) - } - if certEntry == nil { - return nil, fmt.Errorf("unable to revoke certificate: no global cert entry found: %w", ErrServerInternal) - } - - // Validate that the provided certificate matches the stored - // certificate. This completes the chain of: - // - // provided_auth -> provided_cert == stored cert. - // - // Allowing revocation to be safe. 
- // - // We use the non-subtle unsafe bytes equality check here as we have - // already fetched this certificate from storage, thus already leaking - // timing information that this cert exists. The user could thus simply - // fetch the cert from Vault matching this serial number via the unauthed - // pki/certs/:serial API endpoint. - if !bytes.Equal(certEntry.Value, cert.Raw) { - return nil, fmt.Errorf("unable to revoke certificate: supplied certificate does not match CA's stored value: %w", ErrMalformed) - } - - // Check if it was already revoked; in this case, we do not need to - // revoke it again and want to respond with an appropriate error message. - revEntry, err := fetchCertBySerial(acmeCtx.sc, "revoked/", serial) - if err != nil { - return nil, fmt.Errorf("unable to revoke certificate: err reading revocation entry: %v: %w", err, ErrServerInternal) - } - if revEntry != nil { - return nil, fmt.Errorf("unable to revoke certificate: %w", ErrAlreadyRevoked) - } - - // Finally, do the relevant permissions/authorization check as - // appropriate based on the type of revocation happening. - if !userCtx.Existing { - return b.acmeRevocationByPoP(acmeCtx, userCtx, cert, config) - } - - return b.acmeRevocationByAccount(acmeCtx, userCtx, cert, config) -} - -func (b *backend) acmeRevocationByPoP(acmeCtx *acmeContext, userCtx *jwsCtx, cert *x509.Certificate, config *crlConfig) (*logical.Response, error) { - // Since this account does not exist, ensure we've gotten a private key - // matching the certificate's public key. - signer, ok := userCtx.Key.Key.(crypto.Signer) - if !ok { - return nil, fmt.Errorf("unable to revoke certificate: unable to parse JWS key of type (%T): %w", userCtx.Key.Key, ErrMalformed) - } - - // Ensure that our PoP is indeed valid. - if err := validatePrivateKeyMatchesCert(signer, cert); err != nil { - return nil, fmt.Errorf("unable to revoke certificate: unable to verify proof of possession: %v: %w", err, ErrMalformed) - } - - // Now it is safe to revoke. - b.revokeStorageLock.Lock() - defer b.revokeStorageLock.Unlock() - - return revokeCert(acmeCtx.sc, config, cert) -} - -func (b *backend) acmeRevocationByAccount(acmeCtx *acmeContext, userCtx *jwsCtx, cert *x509.Certificate, config *crlConfig) (*logical.Response, error) { - // Fetch the account; disallow revocations from non-valid-status accounts. - _, err := requireValidAcmeAccount(acmeCtx, userCtx) - if err != nil { - return nil, fmt.Errorf("failed to lookup account: %w", err) - } - - // We only support certificates issued by this user, we don't support - // cross-account revocations. - serial := serialFromCert(cert) - acmeEntry, err := b.acmeState.GetIssuedCert(acmeCtx, userCtx.Kid, serial) - if err != nil || acmeEntry == nil { - return nil, fmt.Errorf("unable to revoke certificate: %v: %w", err, ErrMalformed) - } - - // Now it is safe to revoke. - b.revokeStorageLock.Lock() - defer b.revokeStorageLock.Unlock() - - return revokeCert(acmeCtx.sc, config, cert) -} diff --git a/builtin/logical/pki/path_acme_test.go b/builtin/logical/pki/path_acme_test.go deleted file mode 100644 index aed906863466fc..00000000000000 --- a/builtin/logical/pki/path_acme_test.go +++ /dev/null @@ -1,1578 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "crypto" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "crypto/rsa" - "crypto/x509" - "crypto/x509/pkix" - "encoding/base64" - "encoding/json" - "fmt" - "io" - "net" - "net/http" - "os" - "path" - "strings" - "testing" - "time" - - "github.com/hashicorp/vault/sdk/helper/certutil" - - "github.com/go-test/deep" - "github.com/stretchr/testify/require" - "golang.org/x/crypto/acme" - "golang.org/x/net/http2" - - "github.com/hashicorp/go-cleanhttp" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/builtin/logical/pki/dnstest" - "github.com/hashicorp/vault/helper/constants" - "github.com/hashicorp/vault/helper/testhelpers" - vaulthttp "github.com/hashicorp/vault/http" - "github.com/hashicorp/vault/sdk/helper/jsonutil" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/vault" -) - -// TestAcmeBasicWorkflow a test that will validate a basic ACME workflow using the Golang ACME client. -func TestAcmeBasicWorkflow(t *testing.T) { - t.Parallel() - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - cases := []struct { - name string - prefixUrl string - }{ - {"root", "acme/"}, - {"role", "roles/test-role/acme/"}, - {"issuer", "issuer/int-ca/acme/"}, - {"issuer_role", "issuer/int-ca/roles/test-role/acme/"}, - } - testCtx := context.Background() - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - baseAcmeURL := "/v1/pki/" + tc.prefixUrl - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - t.Logf("Testing discover on %s", baseAcmeURL) - discovery, err := acmeClient.Discover(testCtx) - require.NoError(t, err, "failed acme discovery call") - - discoveryBaseUrl := client.Address() + baseAcmeURL - require.Equal(t, discoveryBaseUrl+"new-nonce", discovery.NonceURL) - require.Equal(t, discoveryBaseUrl+"new-account", discovery.RegURL) - require.Equal(t, discoveryBaseUrl+"new-order", discovery.OrderURL) - require.Equal(t, discoveryBaseUrl+"revoke-cert", discovery.RevokeURL) - require.Equal(t, discoveryBaseUrl+"key-change", discovery.KeyChangeURL) - require.False(t, discovery.ExternalAccountRequired, "bad value for external account required in directory") - - // Attempt to update prior to creating an account - t.Logf("Testing updates with no proper account fail on %s", baseAcmeURL) - _, err = acmeClient.UpdateReg(testCtx, &acme.Account{Contact: []string{"mailto:shouldfail@example.com"}}) - require.ErrorIs(t, err, acme.ErrNoAccount, "expected failure attempting to update prior to account registration") - - // Create new account - t.Logf("Testing register on %s", baseAcmeURL) - acct, err := acmeClient.Register(testCtx, &acme.Account{ - Contact: []string{"mailto:test@example.com", "mailto:test2@test.com"}, - }, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - require.Equal(t, acme.StatusValid, acct.Status) - require.Contains(t, acct.Contact, "mailto:test@example.com") - require.Contains(t, acct.Contact, "mailto:test2@test.com") - require.Len(t, acct.Contact, 2) - - // Call register again we should get existing account - t.Logf("Testing duplicate register returns existing account on %s", baseAcmeURL) - _, err = acmeClient.Register(testCtx, acct, func(tosURL string) bool { return true }) - require.ErrorIs(t, err, acme.ErrAccountAlreadyExists, - "We should have returned a 200 
status code which would have triggered an error in the golang acme"+ - " library") - - // Update contact - t.Logf("Testing Update account contacts on %s", baseAcmeURL) - acct.Contact = []string{"mailto:test3@example.com"} - acct2, err := acmeClient.UpdateReg(testCtx, acct) - require.NoError(t, err, "failed updating account") - require.Equal(t, acme.StatusValid, acct2.Status) - // We should get this back, not the original values. - require.Contains(t, acct2.Contact, "mailto:test3@example.com") - require.Len(t, acct2.Contact, 1) - - // Make sure order's do not accept dates - _, err = acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{{Type: "dns", Value: "localhost"}}, - acme.WithOrderNotBefore(time.Now().Add(10*time.Minute))) - require.Error(t, err, "should have rejected a new order with NotBefore set") - - _, err = acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{{Type: "dns", Value: "localhost"}}, - acme.WithOrderNotAfter(time.Now().Add(10*time.Minute))) - require.Error(t, err, "should have rejected a new order with NotAfter set") - - // Make sure DNS identifiers cannot include IP addresses - _, err = acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{{Type: "dns", Value: "127.0.0.1"}}, - acme.WithOrderNotAfter(time.Now().Add(10*time.Minute))) - require.Error(t, err, "should have rejected a new order with IP-like DNS-type identifier") - _, err = acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{{Type: "dns", Value: "*.127.0.0.1"}}, - acme.WithOrderNotAfter(time.Now().Add(10*time.Minute))) - require.Error(t, err, "should have rejected a new order with IP-like DNS-type identifier") - - // Create an order - t.Logf("Testing Authorize Order on %s", baseAcmeURL) - identifiers := []string{"localhost.localdomain", "*.localdomain"} - createOrder, err := acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{ - {Type: "dns", Value: identifiers[0]}, - {Type: "dns", Value: identifiers[1]}, - }) - require.NoError(t, err, "failed creating order") - require.Equal(t, acme.StatusPending, createOrder.Status) - require.Empty(t, createOrder.CertURL) - require.Equal(t, createOrder.URI+"/finalize", createOrder.FinalizeURL) - require.Len(t, createOrder.AuthzURLs, 2, "expected two authzurls") - - // Get order - t.Logf("Testing GetOrder on %s", baseAcmeURL) - getOrder, err := acmeClient.GetOrder(testCtx, createOrder.URI) - require.NoError(t, err, "failed fetching order") - require.Equal(t, acme.StatusPending, createOrder.Status) - if diffs := deep.Equal(createOrder, getOrder); diffs != nil { - t.Fatalf("Differences exist between create and get order: \n%v", strings.Join(diffs, "\n")) - } - - // Make sure the identifiers returned in the order contain the original values - var ids []string - for _, id := range getOrder.Identifiers { - require.Equal(t, "dns", id.Type) - ids = append(ids, id.Value) - } - require.ElementsMatch(t, identifiers, ids, "order responses should have all original identifiers") - - // Load authorizations - var authorizations []*acme.Authorization - for _, authUrl := range getOrder.AuthzURLs { - auth, err := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, err, "failed fetching authorization: %s", authUrl) - - authorizations = append(authorizations, auth) - } - - // We should have 2 separate auth challenges as we have two separate identifier - require.Len(t, authorizations, 2, "expected 2 authorizations in order") - - var wildcardAuth *acme.Authorization - var domainAuth *acme.Authorization - for _, auth := range authorizations { - if auth.Wildcard { - wildcardAuth = auth - } else { 
- domainAuth = auth - } - } - - // Test the values for the domain authentication - require.Equal(t, acme.StatusPending, domainAuth.Status) - require.Equal(t, "dns", domainAuth.Identifier.Type) - require.Equal(t, "localhost.localdomain", domainAuth.Identifier.Value) - require.False(t, domainAuth.Wildcard, "should not be a wildcard") - require.True(t, domainAuth.Expires.IsZero(), "authorization should only have expiry set on valid status") - - require.Len(t, domainAuth.Challenges, 3, "expected three challenges") - require.Equal(t, acme.StatusPending, domainAuth.Challenges[0].Status) - require.True(t, domainAuth.Challenges[0].Validated.IsZero(), "validated time should be 0 on challenge") - require.Equal(t, "http-01", domainAuth.Challenges[0].Type) - require.NotEmpty(t, domainAuth.Challenges[0].Token, "missing challenge token") - require.Equal(t, acme.StatusPending, domainAuth.Challenges[1].Status) - require.True(t, domainAuth.Challenges[1].Validated.IsZero(), "validated time should be 0 on challenge") - require.Equal(t, "dns-01", domainAuth.Challenges[1].Type) - require.NotEmpty(t, domainAuth.Challenges[1].Token, "missing challenge token") - require.Equal(t, acme.StatusPending, domainAuth.Challenges[2].Status) - require.True(t, domainAuth.Challenges[2].Validated.IsZero(), "validated time should be 0 on challenge") - require.Equal(t, "tls-alpn-01", domainAuth.Challenges[2].Type) - require.NotEmpty(t, domainAuth.Challenges[2].Token, "missing challenge token") - - // Test the values for the wildcard authentication - require.Equal(t, acme.StatusPending, wildcardAuth.Status) - require.Equal(t, "dns", wildcardAuth.Identifier.Type) - require.Equal(t, "localdomain", wildcardAuth.Identifier.Value) // Make sure we strip the *. in auth responses - require.True(t, wildcardAuth.Wildcard, "should be a wildcard") - require.True(t, wildcardAuth.Expires.IsZero(), "authorization should only have expiry set on valid status") - - require.Len(t, wildcardAuth.Challenges, 1, "expected one challenge") - require.Equal(t, acme.StatusPending, domainAuth.Challenges[0].Status) - require.True(t, wildcardAuth.Challenges[0].Validated.IsZero(), "validated time should be 0 on challenge") - require.Equal(t, "dns-01", wildcardAuth.Challenges[0].Type) - require.NotEmpty(t, domainAuth.Challenges[0].Token, "missing challenge token") - - // Make sure that getting a challenge does not start it. - challenge, err := acmeClient.GetChallenge(testCtx, domainAuth.Challenges[0].URI) - require.NoError(t, err, "failed to load challenge") - require.Equal(t, acme.StatusPending, challenge.Status) - require.True(t, challenge.Validated.IsZero(), "validated time should be 0 on challenge") - require.Equal(t, "http-01", challenge.Type) - - // Accept a challenge; this triggers validation to start. - challenge, err = acmeClient.Accept(testCtx, domainAuth.Challenges[0]) - require.NoError(t, err, "failed to load challenge") - require.Equal(t, acme.StatusProcessing, challenge.Status) - require.True(t, challenge.Validated.IsZero(), "validated time should be 0 on challenge") - require.Equal(t, "http-01", challenge.Type) - - require.NotEmpty(t, challenge.Token, "missing challenge token") - - // HACK: Update authorization/challenge to completed as we can't really do it properly in this workflow - // test. - markAuthorizationSuccess(t, client, acmeClient, acct, getOrder) - - // Make sure sending a CSR with the account key gets rejected. 
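Editor's note: the test above sidesteps real validation with markAuthorizationSuccess because it cannot serve challenge responses. For context, a sketch of what an actual client would publish for the http-01 and dns-01 challenge types asserted above, using the same golang.org/x/crypto/acme client; the function name is illustrative.

package sketch

import (
	"fmt"

	"golang.org/x/crypto/acme"
)

// publishChallengeResponse prints what would have to be served for a given
// challenge: an HTTP body under /.well-known/acme-challenge/<token> for
// http-01, or a TXT record under _acme-challenge.<domain> for dns-01.
func publishChallengeResponse(client *acme.Client, domain string, chal *acme.Challenge) error {
	switch chal.Type {
	case "http-01":
		body, err := client.HTTP01ChallengeResponse(chal.Token)
		if err != nil {
			return err
		}
		fmt.Printf("serve %q at http://%s%s\n", body, domain, client.HTTP01ChallengePath(chal.Token))
	case "dns-01":
		txt, err := client.DNS01ChallengeRecord(chal.Token)
		if err != nil {
			return err
		}
		fmt.Printf("publish TXT _acme-challenge.%s = %q\n", domain, txt)
	default:
		return fmt.Errorf("unhandled challenge type %q", chal.Type)
	}
	return nil
}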
- goodCr := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: identifiers[1]}, - DNSNames: []string{identifiers[0], identifiers[1]}, - } - t.Logf("csr: %v", goodCr) - - // We want to make sure people are not using the same keys for CSR/Certs and their ACME account. - csrSignedWithAccountKey, err := x509.CreateCertificateRequest(rand.Reader, goodCr, accountKey) - require.NoError(t, err, "failed generating csr") - _, _, err = acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csrSignedWithAccountKey, true) - require.Error(t, err, "should not be allowed to use the account key for a CSR") - - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generated key for CSR") - - // Validate we reject CSRs that contain CN that aren't in the original order - badCr := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: "not-in-original-order.com"}, - DNSNames: []string{identifiers[0], identifiers[1]}, - } - t.Logf("csr: %v", badCr) - - csrWithBadCName, err := x509.CreateCertificateRequest(rand.Reader, badCr, csrKey) - require.NoError(t, err, "failed generating csr with bad common name") - - _, _, err = acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csrWithBadCName, true) - require.Error(t, err, "should not be allowed to csr with different common names than order") - - // Validate we reject CSRs that contain DNS names that aren't in the original order - badCr = &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: createOrder.Identifiers[0].Value}, - DNSNames: []string{"www.notinorder.com"}, - } - - csrWithBadName, err := x509.CreateCertificateRequest(rand.Reader, badCr, csrKey) - require.NoError(t, err, "failed generating csr with bad name") - - _, _, err = acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csrWithBadName, true) - require.Error(t, err, "should not be allowed to csr with different names than order") - - // Validate we reject CSRs that contain IP addreses that weren't in the original order - badCr = &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: createOrder.Identifiers[0].Value}, - IPAddresses: []net.IP{{127, 0, 0, 1}}, - } - - csrWithBadIP, err := x509.CreateCertificateRequest(rand.Reader, badCr, csrKey) - require.NoError(t, err, "failed generating csr with bad name") - - _, _, err = acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csrWithBadIP, true) - require.Error(t, err, "should not be allowed to csr with different ip address than order") - - // Validate we reject CSRs that contains fewer names than in the original order. - badCr = &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: identifiers[0]}, - } - - csrWithBadName, err = x509.CreateCertificateRequest(rand.Reader, badCr, csrKey) - require.NoError(t, err, "failed generating csr with bad name") - - _, _, err = acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csrWithBadName, true) - require.Error(t, err, "should not be allowed to csr with different names than order") - - // Finally test a proper CSR, with the correct name and signed with a different key works. 
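Editor's note: the rejection cases above all exercise one property: the finalized CSR must cover exactly the order's DNS identifiers (CommonName included), no more and no fewer, and may not add IP SANs the order never authorized. An illustrative re-statement of that property follows; it is a sketch, not Vault's implementation.

package sketch

import (
	"crypto/x509"
	"fmt"
	"sort"
	"strings"
)

// csrMatchesOrder checks that the names in a CSR are exactly the order's
// DNS identifiers and that no IP SANs were smuggled in.
func csrMatchesOrder(csr *x509.CertificateRequest, orderIdentifiers []string) error {
	if len(csr.IPAddresses) > 0 {
		return fmt.Errorf("CSR contains IP SANs not present in the order")
	}

	fromCSR := append([]string{}, csr.DNSNames...)
	if cn := csr.Subject.CommonName; cn != "" {
		fromCSR = append(fromCSR, cn)
	}

	normalize := func(in []string) []string {
		seen := map[string]struct{}{}
		for _, v := range in {
			seen[strings.ToLower(v)] = struct{}{}
		}
		out := make([]string, 0, len(seen))
		for v := range seen {
			out = append(out, v)
		}
		sort.Strings(out)
		return out
	}

	got, want := normalize(fromCSR), normalize(orderIdentifiers)
	if len(got) != len(want) {
		return fmt.Errorf("CSR names %v do not match order identifiers %v", got, want)
	}
	for i := range got {
		if got[i] != want[i] {
			return fmt.Errorf("CSR names %v do not match order identifiers %v", got, want)
		}
	}
	return nil
}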
- csr, err := x509.CreateCertificateRequest(rand.Reader, goodCr, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csr, true) - require.NoError(t, err, "failed finalizing order") - require.Len(t, certs, 3, "expected three items within the returned certs") - - testAcmeCertSignedByCa(t, client, certs, "int-ca") - - // Make sure the certificate has a NotAfter date of a maximum of 90 days - acmeCert, err := x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert bytes") - maxAcmeNotAfter := time.Now().Add(maxAcmeCertTTL) - if maxAcmeNotAfter.Before(acmeCert.NotAfter) { - require.Fail(t, fmt.Sprintf("certificate has a NotAfter value %v greater than ACME max ttl %v", acmeCert.NotAfter, maxAcmeNotAfter)) - } - // Deactivate account - t.Logf("Testing deactivate account on %s", baseAcmeURL) - err = acmeClient.DeactivateReg(testCtx) - require.NoError(t, err, "failed deactivating account") - - // Make sure we get an unauthorized error trying to update the account again. - t.Logf("Testing update on deactivated account fails on %s", baseAcmeURL) - _, err = acmeClient.UpdateReg(testCtx, acct) - require.Error(t, err, "expected account to be deactivated") - require.IsType(t, &acme.Error{}, err, "expected acme error type") - acmeErr := err.(*acme.Error) - require.Equal(t, "urn:ietf:params:acme:error:unauthorized", acmeErr.ProblemType) - }) - } -} - -// TestAcmeBasicWorkflowWithEab verify that new accounts require EAB's if enforced by configuration. -func TestAcmeBasicWorkflowWithEab(t *testing.T) { - t.Parallel() - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - testCtx := context.Background() - - // Enable EAB - _, err := client.Logical().WriteWithContext(context.Background(), "pki/config/acme", map[string]interface{}{ - "enabled": true, - "eab_policy": "always-required", - }) - require.NoError(t, err) - - cases := []struct { - name string - prefixUrl string - }{ - {"root", "acme/"}, - {"role", "roles/test-role/acme/"}, - {"issuer", "issuer/int-ca/acme/"}, - {"issuer_role", "issuer/int-ca/roles/test-role/acme/"}, - } - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - baseAcmeURL := "/v1/pki/" + tc.prefixUrl - accountKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed creating ec key") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - t.Logf("Testing discover on %s", baseAcmeURL) - discovery, err := acmeClient.Discover(testCtx) - require.NoError(t, err, "failed acme discovery call") - require.True(t, discovery.ExternalAccountRequired, "bad value for external account required in directory") - - // Create new account without EAB, should fail - t.Logf("Testing register on %s", baseAcmeURL) - _, err = acmeClient.Register(testCtx, &acme.Account{}, func(tosURL string) bool { return true }) - require.ErrorContains(t, err, "urn:ietf:params:acme:error:externalAccountRequired", - "expected failure creating an account without eab") - - // Test fetch, list, delete workflow - kid, _ := getEABKey(t, client, tc.prefixUrl) - resp, err := client.Logical().ListWithContext(testCtx, "pki/eab") - require.NoError(t, err, "failed to list eab tokens") - require.NotNil(t, resp, "list response for eab tokens should not be nil") - require.Contains(t, resp.Data, "keys") - require.Contains(t, resp.Data, "key_info") - require.Len(t, resp.Data["keys"], 1) - require.Contains(t, resp.Data["keys"], kid) - - 
_, err = client.Logical().DeleteWithContext(testCtx, "pki/eab/"+kid) - require.NoError(t, err, "failed to delete eab") - - // List eabs should return zero results - resp, err = client.Logical().ListWithContext(testCtx, "pki/eab") - require.NoError(t, err, "failed to list eab tokens") - require.Nil(t, resp, "list response for eab tokens should have been nil") - - // fetch a new EAB - kid, eabKeyBytes := getEABKey(t, client, tc.prefixUrl) - acct := &acme.Account{ - ExternalAccountBinding: &acme.ExternalAccountBinding{ - KID: kid, - Key: eabKeyBytes, - }, - } - - // Make sure we can list our key - resp, err = client.Logical().ListWithContext(testCtx, "pki/eab") - require.NoError(t, err, "failed to list eab tokens") - require.NotNil(t, resp, "list response for eab tokens should not be nil") - require.Contains(t, resp.Data, "keys") - require.Contains(t, resp.Data, "key_info") - require.Len(t, resp.Data["keys"], 1) - require.Contains(t, resp.Data["keys"], kid) - - keyInfo := resp.Data["key_info"].(map[string]interface{}) - require.Contains(t, keyInfo, kid) - - infoForKid := keyInfo[kid].(map[string]interface{}) - require.Equal(t, "hs", infoForKid["key_type"]) - require.Equal(t, tc.prefixUrl+"directory", infoForKid["acme_directory"]) - - // Create new account with EAB - t.Logf("Testing register on %s", baseAcmeURL) - _, err = acmeClient.Register(testCtx, acct, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering new account with eab") - - // Make sure our EAB is no longer available - resp, err = client.Logical().ListWithContext(context.Background(), "pki/eab") - require.NoError(t, err, "failed to list eab tokens") - require.Nil(t, resp, "list response for eab tokens should have been nil due to empty list") - - // Attempt to create another account with the same EAB as before -- should fail - accountKey2, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed creating ec key") - - acmeClient2 := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey2) - acct2 := &acme.Account{ - ExternalAccountBinding: &acme.ExternalAccountBinding{ - KID: kid, - Key: eabKeyBytes, - }, - } - - _, err = acmeClient2.Register(testCtx, acct2, func(tosURL string) bool { return true }) - require.ErrorContains(t, err, "urn:ietf:params:acme:error:unauthorized", "should fail due to EAB re-use") - - // We can lookup/find an existing account without EAB if we have the account key - _, err = acmeClient.GetReg(testCtx /* unused url */, "") - require.NoError(t, err, "expected to lookup existing account without eab") - }) - } -} - -// TestAcmeNonce a basic test that will validate we get back a nonce with the proper status codes -// based on the -func TestAcmeNonce(t *testing.T) { - t.Parallel() - cluster, client, pathConfig := setupAcmeBackend(t) - defer cluster.Cleanup() - - cases := []struct { - name string - prefixUrl string - directoryUrl string - }{ - {"root", "", "pki/acme/new-nonce"}, - {"role", "/roles/test-role", "pki/roles/test-role/acme/new-nonce"}, - {"issuer", "/issuer/default", "pki/issuer/default/acme/new-nonce"}, - {"issuer_role", "/issuer/default/roles/test-role", "pki/issuer/default/roles/test-role/acme/new-nonce"}, - } - - for _, tc := range cases { - for _, httpOp := range []string{"get", "header"} { - t.Run(fmt.Sprintf("%s-%s", tc.name, httpOp), func(t *testing.T) { - var req *api.Request - switch httpOp { - case "get": - req = client.NewRequest(http.MethodGet, "/v1/"+tc.directoryUrl) - case "header": - req = 
client.NewRequest(http.MethodHead, "/v1/"+tc.directoryUrl) - } - res, err := client.RawRequestWithContext(ctx, req) - require.NoError(t, err, "failed sending raw request") - _ = res.Body.Close() - - // Proper Status Code - switch httpOp { - case "get": - require.Equal(t, http.StatusNoContent, res.StatusCode) - case "header": - require.Equal(t, http.StatusOK, res.StatusCode) - } - - // Make sure we don't have a Content-Type header. - require.Equal(t, "", res.Header.Get("Content-Type")) - - // Make sure we return the Cache-Control header - require.Contains(t, res.Header.Get("Cache-Control"), "no-store", - "missing Cache-Control header with no-store header value") - - // Test for our nonce header value - require.NotEmpty(t, res.Header.Get("Replay-Nonce"), "missing Replay-Nonce header with an actual value") - - // Test Link header value - expectedLinkHeader := fmt.Sprintf("<%s>;rel=\"index\"", pathConfig+tc.prefixUrl+"/acme/directory") - require.Contains(t, res.Header.Get("Link"), expectedLinkHeader, - "different value for link header than expected") - }) - } - } -} - -// TestAcmeClusterPathNotConfigured basic testing of the ACME error handler. -func TestAcmeClusterPathNotConfigured(t *testing.T) { - t.Parallel() - cluster, client := setupTestPkiCluster(t) - defer cluster.Cleanup() - - // Go sneaky, sneaky and update the acme configuration through sys/raw to bypass config/cluster path checks - pkiMount := findStorageMountUuid(t, client, "pki") - rawPath := path.Join("/sys/raw/logical/", pkiMount, storageAcmeConfig) - _, err := client.Logical().WriteWithContext(context.Background(), rawPath, map[string]interface{}{ - "value": "{\"enabled\": true, \"eab_policy_name\": \"not-required\"}", - }) - require.NoError(t, err, "failed updating acme config through sys/raw") - - // Force reload the plugin so we read the new config we slipped in. - _, err = client.Sys().ReloadPluginWithContext(context.Background(), &api.ReloadPluginInput{Mounts: []string{"pki"}}) - require.NoError(t, err, "failed reloading plugin") - - // Do not fill in the path option within the local cluster configuration - cases := []struct { - name string - directoryUrl string - }{ - {"root", "pki/acme/directory"}, - {"role", "pki/roles/test-role/acme/directory"}, - {"issuer", "pki/issuer/default/acme/directory"}, - {"issuer_role", "pki/issuer/default/roles/test-role/acme/directory"}, - } - testCtx := context.Background() - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - dirResp, err := client.Logical().ReadRawWithContext(testCtx, tc.directoryUrl) - require.Error(t, err, "expected failure reading ACME directory configuration got none") - - require.Equal(t, "application/problem+json", dirResp.Header.Get("Content-Type")) - require.Equal(t, http.StatusInternalServerError, dirResp.StatusCode) - - rawBodyBytes, err := io.ReadAll(dirResp.Body) - require.NoError(t, err, "failed reading from directory response body") - _ = dirResp.Body.Close() - - respType := map[string]interface{}{} - err = json.Unmarshal(rawBodyBytes, &respType) - require.NoError(t, err, "failed unmarshalling ACME directory response body") - - require.Equal(t, "urn:ietf:params:acme:error:serverInternal", respType["type"]) - require.NotEmpty(t, respType["detail"]) - }) - } -} - -// TestAcmeAccountsCrossingDirectoryPath make sure that if an account attempts to use a different ACME -// directory path that we get an error. 
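Editor's note: TestAcmeClusterPathNotConfigured above unmarshals the error body by hand and checks the "type" and "detail" fields. For reference, a small sketch of the RFC 8555 problem document shape (application/problem+json) that those assertions rely on; the type name is illustrative.

package sketch

import (
	"encoding/json"
	"fmt"
)

// acmeProblem models the fields the test inspects: a "type" URN such as
// urn:ietf:params:acme:error:serverInternal and a human-readable "detail".
type acmeProblem struct {
	Type   string `json:"type"`
	Detail string `json:"detail"`
}

func parseACMEProblem(body []byte) (*acmeProblem, error) {
	var p acmeProblem
	if err := json.Unmarshal(body, &p); err != nil {
		return nil, fmt.Errorf("failed to decode problem document: %w", err)
	}
	return &p, nil
}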
-func TestAcmeAccountsCrossingDirectoryPath(t *testing.T) { - t.Parallel() - cluster, _, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - baseAcmeURL := "/v1/pki/acme/" - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - testCtx := context.Background() - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // Create new account - acct, err := acmeClient.Register(testCtx, &acme.Account{}, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Try to update the account under another ACME directory - baseAcmeURL2 := "/v1/pki/roles/test-role/acme/" - acmeClient2 := getAcmeClientForCluster(t, cluster, baseAcmeURL2, accountKey) - acct.Contact = []string{"mailto:test3@example.com"} - _, err = acmeClient2.UpdateReg(testCtx, acct) - require.Error(t, err, "successfully updated account when we should have failed due to different directory") - // We don't test for the specific error about using the wrong directory, as the golang library - // swallows the error we are sending back to a no account error -} - -// TestAcmeEabCrossingDirectoryPath make sure that if an account attempts to use a different ACME -// directory path that an EAB was created within we get an error. -func TestAcmeEabCrossingDirectoryPath(t *testing.T) { - t.Parallel() - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - // Enable EAB - _, err := client.Logical().WriteWithContext(context.Background(), "pki/config/acme", map[string]interface{}{ - "enabled": true, - "eab_policy": "always-required", - }) - require.NoError(t, err) - - baseAcmeURL := "/v1/pki/acme/" - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - testCtx := context.Background() - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // fetch a new EAB - kid, eabKeyBytes := getEABKey(t, client, "roles/test-role/acme/") - acct := &acme.Account{ - ExternalAccountBinding: &acme.ExternalAccountBinding{ - KID: kid, - Key: eabKeyBytes, - }, - } - - // Create new account - _, err = acmeClient.Register(testCtx, acct, func(tosURL string) bool { return true }) - require.ErrorContains(t, err, "failed to verify eab", "should have failed as EAB is for a different directory") -} - -// TestAcmeDisabledWithEnvVar verifies if VAULT_DISABLE_PUBLIC_ACME is set that we completely -// disable the ACME service -func TestAcmeDisabledWithEnvVar(t *testing.T) { - // Setup a cluster with the configuration set to not-required, initially as the - // configuration will validate if the environment var is set - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - // Seal setup the environment variable, and unseal which now means we have a cluster - // with ACME configuration saying it is enabled with a bad EAB policy. - cluster.EnsureCoresSealed(t) - t.Setenv("VAULT_DISABLE_PUBLIC_ACME", "true") - cluster.UnsealCores(t) - - // Make sure that ACME is disabled now. 
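Editor's note: the disabled-ACME check performed just below type-asserts the error to *api.ResponseError and looks for a 404. A sketch of the same check written with errors.As, which also handles wrapped errors; the helper name is illustrative.

package sketch

import (
	"errors"

	"github.com/hashicorp/vault/api"
)

// isHTTPStatus reports whether err carries a Vault API response with the
// given HTTP status code (404 once VAULT_DISABLE_PUBLIC_ACME removes the
// ACME paths).
func isHTTPStatus(err error, status int) bool {
	var respErr *api.ResponseError
	if errors.As(err, &respErr) {
		return respErr.StatusCode == status
	}
	return false
}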
- for _, method := range []string{http.MethodHead, http.MethodGet} { - t.Run(fmt.Sprintf("%s", method), func(t *testing.T) { - req := client.NewRequest(method, "/v1/pki/acme/new-nonce") - _, err := client.RawRequestWithContext(ctx, req) - require.Error(t, err, "should have received an error as ACME should have been disabled") - - if apiError, ok := err.(*api.ResponseError); ok { - require.Equal(t, 404, apiError.StatusCode) - } - }) - } -} - -// TestAcmeConfigChecksPublicAcmeEnv verifies certain EAB policy values can not be set if ENV var is enabled -func TestAcmeConfigChecksPublicAcmeEnv(t *testing.T) { - t.Setenv("VAULT_DISABLE_PUBLIC_ACME", "true") - cluster, client := setupTestPkiCluster(t) - defer cluster.Cleanup() - - _, err := client.Logical().WriteWithContext(context.Background(), "pki/config/cluster", map[string]interface{}{ - "path": "https://dadgarcorp.com/v1/pki", - }) - require.NoError(t, err) - - _, err = client.Logical().WriteWithContext(context.Background(), "pki/config/acme", map[string]interface{}{ - "enabled": true, - "eab_policy": string(eabPolicyAlwaysRequired), - }) - require.NoError(t, err) - - for _, policyName := range []EabPolicyName{eabPolicyNewAccountRequired, eabPolicyNotRequired} { - _, err = client.Logical().WriteWithContext(context.Background(), "pki/config/acme", map[string]interface{}{ - "enabled": true, - "eab_policy": string(policyName), - }) - require.Error(t, err, "eab policy %s should have not been allowed to be set") - } - - // Make sure we can disable ACME and the eab policy is not checked - _, err = client.Logical().WriteWithContext(context.Background(), "pki/config/acme", map[string]interface{}{ - "enabled": false, - "eab_policy": string(eabPolicyNotRequired), - }) - require.NoError(t, err) -} - -// TestAcmeTruncatesToIssuerExpiry make sure that if the selected issuer's expiry is shorter than the -// CSR's selected TTL value in ACME and the issuer's leaf_not_after_behavior setting is set to Err, -// we will override the configured behavior and truncate to the issuer's NotAfter -func TestAcmeTruncatesToIssuerExpiry(t *testing.T) { - t.Parallel() - - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - testCtx := context.Background() - mount := "pki" - resp, err := client.Logical().WriteWithContext(context.Background(), mount+"/issuers/generate/intermediate/internal", - map[string]interface{}{ - "key_name": "short-key", - "key_type": "ec", - "common_name": "test.com", - }) - require.NoError(t, err, "failed creating intermediary CSR") - intermediateCSR := resp.Data["csr"].(string) - - // Sign the intermediate CSR using /pki - resp, err = client.Logical().Write(mount+"/issuer/root-ca/sign-intermediate", map[string]interface{}{ - "csr": intermediateCSR, - "ttl": "10m", - "max_ttl": "1h", - }) - require.NoError(t, err, "failed signing intermediary CSR") - intermediateCertPEM := resp.Data["certificate"].(string) - - shortCa := parseCert(t, intermediateCertPEM) - - // Configure the intermediate cert as the CA in /pki2 - resp, err = client.Logical().Write(mount+"/issuers/import/cert", map[string]interface{}{ - "pem_bundle": intermediateCertPEM, - }) - require.NoError(t, err, "failed importing intermediary cert") - importedIssuersRaw := resp.Data["imported_issuers"].([]interface{}) - require.Len(t, importedIssuersRaw, 1) - shortCaUuid := importedIssuersRaw[0].(string) - - _, err = client.Logical().Write(mount+"/issuer/"+shortCaUuid, map[string]interface{}{ - "leaf_not_after_behavior": "err", - "issuer_name": "short-ca", - }) - 
require.NoError(t, err, "failed updating issuer name") - - baseAcmeURL := "/v1/pki/issuer/short-ca/acme/" - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // Create new account - t.Logf("Testing register on %s", baseAcmeURL) - acct, err := acmeClient.Register(testCtx, &acme.Account{}, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create an order - t.Logf("Testing Authorize Order on %s", baseAcmeURL) - identifiers := []string{"*.localdomain"} - order, err := acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{ - {Type: "dns", Value: identifiers[0]}, - }) - require.NoError(t, err, "failed creating order") - - // HACK: Update authorization/challenge to completed as we can't really do it properly in this workflow - // test. - markAuthorizationSuccess(t, client, acmeClient, acct, order) - - // Build a proper CSR, with the correct name and signed with a different key works. - goodCr := &x509.CertificateRequest{DNSNames: []string{identifiers[0]}} - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generated key for CSR") - csr, err := x509.CreateCertificateRequest(rand.Reader, goodCr, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, true) - require.NoError(t, err, "failed finalizing order") - require.Len(t, certs, 3, "expected full acme chain") - - testAcmeCertSignedByCa(t, client, certs, "short-ca") - - acmeCert, err := x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert") - - require.Equal(t, shortCa.NotAfter, acmeCert.NotAfter, "certificate times aren't the same") -} - -// TestAcmeIgnoresRoleExtKeyUsage -func TestAcmeIgnoresRoleExtKeyUsage(t *testing.T) { - t.Parallel() - - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - testCtx := context.Background() - - roleName := "test-role" - - roleOpt := map[string]interface{}{ - "ttl_duration": "365h", - "max_ttl_duration": "720h", - "key_type": "any", - "allowed_domains": "localdomain", - "allow_subdomains": "true", - "allow_wildcard_certificates": "true", - "require_cn": "true", /* explicit default */ - "server_flag": "true", - "client_flag": "true", - "code_signing_flag": "true", - "email_protection_flag": "true", - } - - _, err := client.Logical().Write("pki/roles/"+roleName, roleOpt) - - baseAcmeURL := "/v1/pki/roles/" + roleName + "/acme/" - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - require.NoError(t, err, "failed creating role test-role") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // Create new account - t.Logf("Testing register on %s", baseAcmeURL) - acct, err := acmeClient.Register(testCtx, &acme.Account{}, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create an order - t.Logf("Testing Authorize Order on %s", baseAcmeURL) - identifiers := []string{"*.localdomain"} - order, err := acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{ - {Type: "dns", Value: identifiers[0]}, - }) - require.NoError(t, err, "failed creating order") - - // HACK: Update authorization/challenge to completed as we can't really do it properly in this workflow test. 
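Editor's note: the truncation test above asserts that the issued leaf's NotAfter equals the short-lived issuer's NotAfter even though leaf_not_after_behavior is "err". That behavior reduces to capping the leaf lifetime at the issuer's expiry; a minimal sketch of the rule, with illustrative names, follows.

package sketch

import "time"

// effectiveNotAfter caps a requested leaf lifetime at the issuing CA's
// NotAfter instead of erroring, which is what the ACME flow asserts above.
func effectiveNotAfter(now time.Time, requestedTTL time.Duration, issuerNotAfter time.Time) time.Time {
	notAfter := now.Add(requestedTTL)
	if notAfter.After(issuerNotAfter) {
		return issuerNotAfter
	}
	return notAfter
}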
- markAuthorizationSuccess(t, client, acmeClient, acct, order) - - // Build a proper CSR, with the correct name and signed with a different key works. - goodCr := &x509.CertificateRequest{DNSNames: []string{identifiers[0]}} - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generated key for CSR") - csr, err := x509.CreateCertificateRequest(rand.Reader, goodCr, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, true) - require.NoError(t, err, "order finalization failed") - require.GreaterOrEqual(t, len(certs), 1, "expected at least one cert in bundle") - acmeCert, err := x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert") - - require.Equal(t, 1, len(acmeCert.ExtKeyUsage), "mis-match on expected ExtKeyUsages") - require.ElementsMatch(t, []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, acmeCert.ExtKeyUsage, - "mismatch of ExtKeyUsage flags") -} - -func TestIssuerRoleDirectoryAssociations(t *testing.T) { - t.Parallel() - - // This creates two issuers for us (root-ca, int-ca) and two - // roles (test-role, acme) that we can use with various directory - // configurations. - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - // Setup DNS for validations. - testCtx := context.Background() - dns := dnstest.SetupResolver(t, "dadgarcorp.com") - defer dns.Cleanup() - _, err := client.Logical().WriteWithContext(testCtx, "pki/config/acme", map[string]interface{}{ - "dns_resolver": dns.GetLocalAddr(), - }) - require.NoError(t, err, "failed to specify dns resolver") - - // 1. Use a forbidden role should fail. - resp, err := client.Logical().WriteWithContext(testCtx, "pki/config/acme", map[string]interface{}{ - "enabled": true, - "allowed_roles": []string{"acme"}, - }) - require.NoError(t, err, "failed to write config") - require.NotNil(t, resp) - - _, err = client.Logical().ReadWithContext(testCtx, "pki/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/default/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role under default issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/int-ca/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role under int-ca issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/root-ca/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role under root-ca issuer") - - _, err = client.Logical().ReadWithContext(testCtx, "pki/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/default/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme under default issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/int-ca/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme under int-ca issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/root-ca/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme under root-ca issuer") - - // 2. Use a forbidden issuer should fail. 
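Editor's note: the directory checks in this test gate access on the allowed_roles and allowed_issuers lists in pki/config/acme, with "*" acting as a wildcard. A sketch of that membership check follows; the function name is illustrative and the real logic lives behind the config endpoint.

package sketch

// allowedByACMEConfig reports whether a requested role or issuer appears in
// the configured allow-list, treating "*" as match-everything.
func allowedByACMEConfig(requested string, allowed []string) bool {
	for _, entry := range allowed {
		if entry == "*" || entry == requested {
			return true
		}
	}
	return false
}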
- resp, err = client.Logical().WriteWithContext(testCtx, "pki/config/acme", map[string]interface{}{ - "allowed_roles": []string{"acme"}, - "allowed_issuers": []string{"int-ca"}, - }) - require.NoError(t, err, "failed to write config") - require.NotNil(t, resp) - - _, err = client.Logical().ReadWithContext(testCtx, "pki/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/default/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role under default issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/int-ca/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role under int-ca issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/root-ca/roles/test-role/acme/directory") - require.Error(t, err, "failed to forbid usage of test-role under root-ca issuer") - - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/root-ca/roles/acme/acme/directory") - require.Error(t, err, "failed to forbid usage of acme under root-ca issuer") - - _, err = client.Logical().ReadWithContext(testCtx, "pki/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/default/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme under default issuer") - _, err = client.Logical().ReadWithContext(testCtx, "pki/issuer/int-ca/roles/acme/acme/directory") - require.NoError(t, err, "failed to allow usage of acme under int-ca issuer") - - // 3. Setting the default directory to be a sign-verbatim policy and - // using two different CAs should result in certs signed by each CA. - resp, err = client.Logical().WriteWithContext(testCtx, "pki/config/acme", map[string]interface{}{ - "allowed_roles": []string{"*"}, - "allowed_issuers": []string{"*"}, - "default_directory_policy": "sign-verbatim", - }) - require.NoError(t, err, "failed to write config") - require.NotNil(t, resp) - - // default == int-ca - acmeClientDefault := getAcmeClientForCluster(t, cluster, "/v1/pki/issuer/default/acme/", nil) - defaultLeafCert := doACMEForDomainWithDNS(t, dns, acmeClientDefault, []string{"default-ca.dadgarcorp.com"}) - requireSignedByAtPath(t, client, defaultLeafCert, "pki/issuer/int-ca") - - acmeClientIntCA := getAcmeClientForCluster(t, cluster, "/v1/pki/issuer/int-ca/acme/", nil) - intCALeafCert := doACMEForDomainWithDNS(t, dns, acmeClientIntCA, []string{"int-ca.dadgarcorp.com"}) - requireSignedByAtPath(t, client, intCALeafCert, "pki/issuer/int-ca") - - acmeClientRootCA := getAcmeClientForCluster(t, cluster, "/v1/pki/issuer/root-ca/acme/", nil) - rootCALeafCert := doACMEForDomainWithDNS(t, dns, acmeClientRootCA, []string{"root-ca.dadgarcorp.com"}) - requireSignedByAtPath(t, client, rootCALeafCert, "pki/issuer/root-ca") - - // 4. Using a role-based default directory should allow us to control leaf - // issuance on the base and issuer-specific directories. 
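Editor's note: the test switches default_directory_policy between "sign-verbatim" (above) and "role:acme" (below). A sketch of parsing just those two shapes follows; other policy values, if any, are out of scope here and the function name is illustrative.

package sketch

import (
	"fmt"
	"strings"
)

// parseDefaultDirectoryPolicy distinguishes the two policies exercised in
// this test: the literal "sign-verbatim", and "role:<name>" which pins the
// base directory to a specific role.
func parseDefaultDirectoryPolicy(policy string) (signVerbatim bool, role string, err error) {
	switch {
	case policy == "sign-verbatim":
		return true, "", nil
	case strings.HasPrefix(policy, "role:"):
		name := strings.TrimPrefix(policy, "role:")
		if name == "" {
			return false, "", fmt.Errorf("missing role name in policy %q", policy)
		}
		return false, name, nil
	default:
		return false, "", fmt.Errorf("unrecognized default_directory_policy %q", policy)
	}
}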
- resp, err = client.Logical().WriteWithContext(testCtx, "pki/config/acme", map[string]interface{}{ - "allowed_roles": []string{"*"}, - "allowed_issuers": []string{"*"}, - "default_directory_policy": "role:acme", - }) - require.NoError(t, err, "failed to write config") - require.NotNil(t, resp) - - resp, err = client.Logical().JSONMergePatch(testCtx, "pki/roles/acme", map[string]interface{}{ - "ou": "IT Security", - "organization": []string{"Dadgar Corporation, Limited"}, - "allow_any_name": true, - }) - require.NoError(t, err, "failed to write role differentiator") - require.NotNil(t, resp) - - for _, issuer := range []string{"", "default", "int-ca", "root-ca"} { - // Path should override role. - directory := "/v1/pki/issuer/" + issuer + "/acme/" - issuerPath := "/pki/issuer/" + issuer - if issuer == "" { - directory = "/v1/pki/acme/" - issuerPath = "/pki/issuer/int-ca" - } else if issuer == "default" { - issuerPath = "/pki/issuer/int-ca" - } - - t.Logf("using directory: %v / issuer: %v", directory, issuerPath) - - acmeClient := getAcmeClientForCluster(t, cluster, directory, nil) - leafCert := doACMEForDomainWithDNS(t, dns, acmeClient, []string{"role-restricted.dadgarcorp.com"}) - require.Contains(t, leafCert.Subject.Organization, "Dadgar Corporation, Limited", "on directory: %v", directory) - require.Contains(t, leafCert.Subject.OrganizationalUnit, "IT Security", "on directory: %v", directory) - requireSignedByAtPath(t, client, leafCert, issuerPath) - } - - // 5. -} - -// TestAcmeWithCsrIncludingBasicConstraintExtension verify that we error out for a CSR that is requesting a -// certificate with the IsCA set to true, false is okay, within the basic constraints extension and that no matter what -// the extension is not present on the returned certificate. -func TestAcmeWithCsrIncludingBasicConstraintExtension(t *testing.T) { - t.Parallel() - - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - testCtx := context.Background() - - baseAcmeURL := "/v1/pki/acme/" - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // Create new account - t.Logf("Testing register on %s", baseAcmeURL) - acct, err := acmeClient.Register(testCtx, &acme.Account{}, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create an order - t.Logf("Testing Authorize Order on %s", baseAcmeURL) - identifiers := []string{"*.localdomain"} - order, err := acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{ - {Type: "dns", Value: identifiers[0]}, - }) - require.NoError(t, err, "failed creating order") - - // HACK: Update authorization/challenge to completed as we can't really do it properly in this workflow test. 
- markAuthorizationSuccess(t, client, acmeClient, acct, order) - - // Build a CSR with IsCA set to true, making sure we reject it - extension, err := certutil.CreateBasicConstraintExtension(true, -1) - require.NoError(t, err, "failed generating basic constraint extension") - - isCATrueCSR := &x509.CertificateRequest{ - DNSNames: []string{identifiers[0]}, - ExtraExtensions: []pkix.Extension{extension}, - } - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generated key for CSR") - csr, err := x509.CreateCertificateRequest(rand.Reader, isCATrueCSR, csrKey) - require.NoError(t, err, "failed generating csr") - - _, _, err = acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, true) - require.Error(t, err, "order finalization should have failed with IsCA set to true") - - extension, err = certutil.CreateBasicConstraintExtension(false, -1) - require.NoError(t, err, "failed generating basic constraint extension") - isCAFalseCSR := &x509.CertificateRequest{ - DNSNames: []string{identifiers[0]}, - Extensions: []pkix.Extension{extension}, - } - - csr, err = x509.CreateCertificateRequest(rand.Reader, isCAFalseCSR, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, true) - require.NoError(t, err, "order finalization should have failed with IsCA set to false") - - require.GreaterOrEqual(t, len(certs), 1, "expected at least one cert in bundle") - acmeCert, err := x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert") - - // Make sure we don't have any basic constraint extension within the returned cert - for _, ext := range acmeCert.Extensions { - if ext.Id.Equal(certutil.ExtensionBasicConstraintsOID) { - // We shouldn't have this extension in our cert - t.Fatalf("acme csr contained a basic constraints extension") - } - } -} - -func markAuthorizationSuccess(t *testing.T, client *api.Client, acmeClient *acme.Client, acct *acme.Account, - order *acme.Order, -) { - testCtx := context.Background() - - pkiMount := findStorageMountUuid(t, client, "pki") - - // Delete any and all challenge validation entries to stop the engine from overwriting our hack here - i := 0 - for { - deleteCvEntries(t, client, pkiMount) - - accountId := acct.URI[strings.LastIndex(acct.URI, "/"):] - for _, authURI := range order.AuthzURLs { - authId := authURI[strings.LastIndex(authURI, "/"):] - - rawPath := path.Join("/sys/raw/logical/", pkiMount, getAuthorizationPath(accountId, authId)) - resp, err := client.Logical().ReadWithContext(testCtx, rawPath) - require.NoError(t, err, "failed looking up authorization storage") - require.NotNil(t, resp, "sys raw response was nil") - require.NotEmpty(t, resp.Data["value"], "no value field in sys raw response") - - var authz ACMEAuthorization - err = jsonutil.DecodeJSON([]byte(resp.Data["value"].(string)), &authz) - require.NoError(t, err, "error decoding authorization: %w", err) - authz.Status = ACMEAuthorizationValid - for _, challenge := range authz.Challenges { - challenge.Status = ACMEChallengeValid - } - - encodeJSON, err := jsonutil.EncodeJSON(authz) - require.NoError(t, err, "failed encoding authz json") - _, err = client.Logical().WriteWithContext(testCtx, rawPath, map[string]interface{}{ - "value": base64.StdEncoding.EncodeToString(encodeJSON), - "encoding": "base64", - }) - require.NoError(t, err, "failed writing authorization storage") - } - - // Give some time - time.Sleep(200 * time.Millisecond) - - // Check 
to see if we have fixed up the status and no new entries have appeared. - if !deleteCvEntries(t, client, pkiMount) { - // No entries found - // Look to see if we raced against the engine - orderLookup, err := acmeClient.GetOrder(testCtx, order.URI) - require.NoError(t, err, "failed loading order status after manually ") - - if orderLookup.Status == string(ACMEOrderReady) { - // Our order seems to be in the proper status, should be safe-ish to go ahead now - break - } else { - t.Logf("order status was not ready, retrying") - } - } else { - t.Logf("new challenge entries appeared after deletion, retrying") - } - - if i > 5 { - t.Fatalf("We are constantly deleting cv entries or order status is not changing, something is wrong") - } - - i++ - } -} - -func deleteCvEntries(t *testing.T, client *api.Client, pkiMount string) bool { - testCtx := context.Background() - - cvPath := path.Join("/sys/raw/logical/", pkiMount, acmeValidationPrefix) - resp, err := client.Logical().ListWithContext(testCtx, cvPath) - require.NoError(t, err, "failed listing cv path items") - - deletedEntries := false - if resp != nil { - cvEntries := resp.Data["keys"].([]interface{}) - for _, cvEntry := range cvEntries { - cvEntryPath := path.Join(cvPath, cvEntry.(string)) - _, err = client.Logical().DeleteWithContext(testCtx, cvEntryPath) - require.NoError(t, err, "failed to delete cv entry") - deletedEntries = true - } - } - - return deletedEntries -} - -func setupAcmeBackend(t *testing.T) (*vault.TestCluster, *api.Client, string) { - cluster, client := setupTestPkiCluster(t) - - return setupAcmeBackendOnClusterAtPath(t, cluster, client, "pki") -} - -func setupAcmeBackendOnClusterAtPath(t *testing.T, cluster *vault.TestCluster, client *api.Client, mount string) (*vault.TestCluster, *api.Client, string) { - mount = strings.Trim(mount, "/") - - // Setting templated AIAs should succeed. 
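Editor's note: the setup helper below tunes the mount so ACME-specific headers pass through. A sketch of that one step in isolation, using the same Vault API client calls; the function name is illustrative.

package sketch

import (
	"context"

	"github.com/hashicorp/vault/api"
)

// tuneMountForACME allows the response headers ACME clients depend on
// (Replay-Nonce, Link, Location, Last-Modified) on a PKI mount.
func tuneMountForACME(ctx context.Context, client *api.Client, mount string) error {
	_, err := client.Logical().WriteWithContext(ctx, "sys/mounts/"+mount+"/tune", map[string]interface{}{
		"allowed_response_headers": []string{"Last-Modified", "Replay-Nonce", "Link", "Location"},
	})
	return err
}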
- pathConfig := client.Address() + "/v1/" + mount - - namespace := "" - mountName := mount - if mount != "pki" { - if strings.Contains(mount, "/") && constants.IsEnterprise { - ns_pieces := strings.Split(mount, "/") - c := len(ns_pieces) - // mount is c-1 - ns_name := ns_pieces[c-2] - if len(ns_pieces) > 2 { - // Parent's namespaces - parent := strings.Join(ns_pieces[0:c-2], "/") - _, err := client.WithNamespace(parent).Logical().Write("/sys/namespaces/"+ns_name, nil) - require.NoError(t, err, "failed to create nested namespaces "+parent+" -> "+ns_name) - } else { - _, err := client.Logical().Write("/sys/namespaces/"+ns_name, nil) - require.NoError(t, err, "failed to create nested namespace "+ns_name) - } - namespace = strings.Join(ns_pieces[0:c-1], "/") - mountName = ns_pieces[c-1] - } - - err := client.WithNamespace(namespace).Sys().Mount(mountName, &api.MountInput{ - Type: "pki", - Config: api.MountConfigInput{ - DefaultLeaseTTL: "3000h", - MaxLeaseTTL: "600000h", - }, - }) - require.NoError(t, err, "failed to mount new PKI instance at "+mount) - } - - err := client.Sys().TuneMountWithContext(ctx, mountName, api.MountConfigInput{ - DefaultLeaseTTL: "3000h", - MaxLeaseTTL: "600000h", - }) - require.NoError(t, err, "failed updating mount lease times "+mount) - - _, err = client.Logical().WriteWithContext(context.Background(), mount+"/config/cluster", map[string]interface{}{ - "path": pathConfig, - "aia_path": "http://localhost:8200/cdn/" + mount, - }) - require.NoError(t, err) - - _, err = client.Logical().WriteWithContext(context.Background(), mount+"/config/acme", map[string]interface{}{ - "enabled": true, - "eab_policy": "not-required", - }) - require.NoError(t, err) - - // Allow certain headers to pass through for ACME support - _, err = client.WithNamespace(namespace).Logical().WriteWithContext(context.Background(), "sys/mounts/"+mountName+"/tune", map[string]interface{}{ - "allowed_response_headers": []string{"Last-Modified", "Replay-Nonce", "Link", "Location"}, - "max_lease_ttl": "920000h", - }) - require.NoError(t, err, "failed tuning mount response headers") - - resp, err := client.Logical().WriteWithContext(context.Background(), mount+"/issuers/generate/root/internal", - map[string]interface{}{ - "issuer_name": "root-ca", - "key_name": "root-key", - "key_type": "ec", - "common_name": "root.com", - "ttl": "7200h", - "max_ttl": "920000h", - }) - require.NoError(t, err, "failed creating root CA") - - resp, err = client.Logical().WriteWithContext(context.Background(), mount+"/issuers/generate/intermediate/internal", - map[string]interface{}{ - "key_name": "int-key", - "key_type": "ec", - "common_name": "test.com", - }) - require.NoError(t, err, "failed creating intermediary CSR") - intermediateCSR := resp.Data["csr"].(string) - - // Sign the intermediate CSR using /pki - resp, err = client.Logical().Write(mount+"/issuer/root-ca/sign-intermediate", map[string]interface{}{ - "csr": intermediateCSR, - "ttl": "7100h", - "max_ttl": "910000h", - }) - require.NoError(t, err, "failed signing intermediary CSR") - intermediateCertPEM := resp.Data["certificate"].(string) - - // Configure the intermediate cert as the CA in /pki2 - resp, err = client.Logical().Write(mount+"/issuers/import/cert", map[string]interface{}{ - "pem_bundle": intermediateCertPEM, - }) - require.NoError(t, err, "failed importing intermediary cert") - importedIssuersRaw := resp.Data["imported_issuers"].([]interface{}) - require.Len(t, importedIssuersRaw, 1) - intCaUuid := importedIssuersRaw[0].(string) - - _, err = 
client.Logical().Write(mount+"/issuer/"+intCaUuid, map[string]interface{}{ - "issuer_name": "int-ca", - }) - require.NoError(t, err, "failed updating issuer name") - - _, err = client.Logical().Write(mount+"/config/issuers", map[string]interface{}{ - "default": "int-ca", - }) - require.NoError(t, err, "failed updating default issuer") - - _, err = client.Logical().Write(mount+"/roles/test-role", map[string]interface{}{ - "ttl_duration": "168h", - "max_ttl_duration": "168h", - "key_type": "any", - "allowed_domains": "localdomain", - "allow_subdomains": "true", - "allow_wildcard_certificates": "true", - }) - require.NoError(t, err, "failed creating role test-role") - - _, err = client.Logical().Write(mount+"/roles/acme", map[string]interface{}{ - "ttl_duration": "3650h", - "max_ttl_duration": "7200h", - "key_type": "any", - }) - require.NoError(t, err, "failed creating role acme") - - return cluster, client, pathConfig -} - -func testAcmeCertSignedByCa(t *testing.T, client *api.Client, derCerts [][]byte, issuerRef string) { - t.Helper() - require.NotEmpty(t, derCerts) - acmeCert, err := x509.ParseCertificate(derCerts[0]) - require.NoError(t, err, "failed parsing acme cert bytes") - - resp, err := client.Logical().ReadWithContext(context.Background(), "pki/issuer/"+issuerRef) - require.NoError(t, err, "failed reading issuer with name %s", issuerRef) - issuerCert := parseCert(t, resp.Data["certificate"].(string)) - issuerChainRaw := resp.Data["ca_chain"].([]interface{}) - - err = acmeCert.CheckSignatureFrom(issuerCert) - require.NoError(t, err, "issuer %s did not sign provided cert", issuerRef) - - expectedCerts := [][]byte{derCerts[0]} - - for _, entry := range issuerChainRaw { - chainCert := parseCert(t, entry.(string)) - expectedCerts = append(expectedCerts, chainCert.Raw) - } - - if diffs := deep.Equal(expectedCerts, derCerts); diffs != nil { - t.Fatalf("diffs were found between the acme chain returned and the expected value: \n%v", diffs) - } -} - -// TestAcmeValidationError make sure that we properly return errors on validation errors. 
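Editor's note: testAcmeCertSignedByCa above checks the returned chain link-by-link with CheckSignatureFrom plus a deep comparison. An alternative sketch that verifies the whole returned DER chain through crypto/x509 path building follows; it assumes the last element is the trust anchor and is illustrative only.

package sketch

import (
	"crypto/x509"
	"fmt"
)

// verifyLeafAgainstChain parses an ACME finalize response ([leaf, CAs...])
// and lets crypto/x509 validate the full path.
func verifyLeafAgainstChain(derCerts [][]byte) error {
	if len(derCerts) < 2 {
		return fmt.Errorf("expected a leaf plus at least one CA certificate")
	}
	leaf, err := x509.ParseCertificate(derCerts[0])
	if err != nil {
		return fmt.Errorf("failed parsing leaf: %w", err)
	}
	roots := x509.NewCertPool()
	intermediates := x509.NewCertPool()
	cas := derCerts[1:]
	for i, der := range cas {
		ca, err := x509.ParseCertificate(der)
		if err != nil {
			return fmt.Errorf("failed parsing chain cert %d: %w", i+1, err)
		}
		// Treat the last element as the trust anchor, everything else as an intermediate.
		if i == len(cas)-1 {
			roots.AddCert(ca)
		} else {
			intermediates.AddCert(ca)
		}
	}
	_, err = leaf.Verify(x509.VerifyOptions{
		Roots:         roots,
		Intermediates: intermediates,
		KeyUsages:     []x509.ExtKeyUsage{x509.ExtKeyUsageAny},
	})
	return err
}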
-func TestAcmeValidationError(t *testing.T) { - t.Parallel() - cluster, _, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - testCtx := context.Background() - baseAcmeURL := "/v1/pki/acme/" - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // Create new account - t.Logf("Testing register on %s", baseAcmeURL) - _, err = acmeClient.Register(testCtx, &acme.Account{}, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create an order - t.Logf("Testing Authorize Order on %s", baseAcmeURL) - identifiers := []string{"www.dadgarcorp.com"} - order, err := acmeClient.AuthorizeOrder(testCtx, []acme.AuthzID{ - {Type: "dns", Value: identifiers[0]}, - }) - require.NoError(t, err, "failed creating order") - - // Load authorizations - var authorizations []*acme.Authorization - for _, authUrl := range order.AuthzURLs { - auth, err := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, err, "failed fetching authorization: %s", authUrl) - - authorizations = append(authorizations, auth) - } - require.Len(t, authorizations, 1, "expected a certain number of authorizations") - require.Len(t, authorizations[0].Challenges, 3, "expected a certain number of challenges associated with authorization") - - acceptedAuth, err := acmeClient.Accept(testCtx, authorizations[0].Challenges[0]) - require.NoError(t, err, "Should have been allowed to accept challenge 1") - require.Equal(t, string(ACMEChallengeProcessing), acceptedAuth.Status) - - _, err = acmeClient.Accept(testCtx, authorizations[0].Challenges[1]) - require.Error(t, err, "Should have been prevented to accept challenge 2") - - // Make sure our challenge returns errors - testhelpers.RetryUntil(t, 30*time.Second, func() error { - challenge, err := acmeClient.GetChallenge(testCtx, authorizations[0].Challenges[0].URI) - if err != nil { - return err - } - - if challenge.Error == nil { - return fmt.Errorf("no error set in challenge yet") - } - - acmeError, ok := challenge.Error.(*acme.Error) - if !ok { - return fmt.Errorf("unexpected error back: %v", err) - } - - if acmeError.ProblemType != "urn:ietf:params:acme:error:incorrectResponse" { - return fmt.Errorf("unexpected ACME error back: %v", acmeError) - } - - return nil - }) - - // Make sure our challenge,auth and order status change. - // This takes a little too long to run in CI properly, we need the ability to influence - // how long the validations take before CI can go wild on this. 
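// Illustrative sketch, not part of this patch: one way to make the wait budget for
// these status transitions tunable (for example, longer in CI) instead of hard-coding
// 30s/10m as above. The ACME_VALIDATION_WAIT variable and pollUntil helper are
// hypothetical names introduced only for this sketch.
package pkisketches

import (
	"context"
	"fmt"
	"os"
	"time"
)

// pollUntil retries fn once per second until it succeeds or the wait budget is
// exhausted. The budget defaults to 30s and can be stretched via the hypothetical
// ACME_VALIDATION_WAIT duration string (e.g. "10m").
func pollUntil(fn func() error) error {
	wait := 30 * time.Second
	if raw := os.Getenv("ACME_VALIDATION_WAIT"); raw != "" {
		if d, err := time.ParseDuration(raw); err == nil {
			wait = d
		}
	}
	ctx, cancel := context.WithTimeout(context.Background(), wait)
	defer cancel()

	var lastErr error
	for {
		if lastErr = fn(); lastErr == nil {
			return nil
		}
		select {
		case <-ctx.Done():
			return fmt.Errorf("gave up after %s: %w", wait, lastErr)
		case <-time.After(time.Second):
		}
	}
}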
- if os.Getenv("CI") == "" { - testhelpers.RetryUntil(t, 10*time.Minute, func() error { - challenge, err := acmeClient.GetChallenge(testCtx, authorizations[0].Challenges[0].URI) - if err != nil { - return fmt.Errorf("failed to load challenge: %w", err) - } - - if challenge.Status != string(ACMEChallengeInvalid) { - return fmt.Errorf("challenge state was not changed to invalid: %v", challenge) - } - - authz, err := acmeClient.GetAuthorization(testCtx, authorizations[0].URI) - if err != nil { - return fmt.Errorf("failed to load authorization: %w", err) - } - - if authz.Status != string(ACMEAuthorizationInvalid) { - return fmt.Errorf("authz state was not changed to invalid: %v", authz) - } - - myOrder, err := acmeClient.GetOrder(testCtx, order.URI) - if err != nil { - return fmt.Errorf("failed to load order: %w", err) - } - - if myOrder.Status != string(ACMEOrderInvalid) { - return fmt.Errorf("order state was not changed to invalid: %v", order) - } - - return nil - }) - } -} - -func setupTestPkiCluster(t *testing.T) (*vault.TestCluster, *api.Client) { - coreConfig := &vault.CoreConfig{ - LogicalBackends: map[string]logical.Factory{ - "pki": Factory, - }, - EnableRaw: true, - } - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - client := cluster.Cores[0].Client - mountPKIEndpoint(t, client, "pki") - return cluster, client -} - -func getAcmeClientForCluster(t *testing.T, cluster *vault.TestCluster, baseUrl string, key crypto.Signer) *acme.Client { - coreAddr := cluster.Cores[0].Listeners[0].Address - tlsConfig := cluster.Cores[0].TLSConfig() - - transport := cleanhttp.DefaultPooledTransport() - transport.TLSClientConfig = tlsConfig.Clone() - if err := http2.ConfigureTransport(transport); err != nil { - t.Fatal(err) - } - httpClient := &http.Client{Transport: transport} - if baseUrl[0] == '/' { - baseUrl = baseUrl[1:] - } - if !strings.HasPrefix(baseUrl, "v1/") { - baseUrl = "v1/" + baseUrl - } - baseAcmeURL := fmt.Sprintf("https://%s/%s", coreAddr.String(), baseUrl) - return &acme.Client{ - Key: key, - HTTPClient: httpClient, - DirectoryURL: baseAcmeURL + "directory", - } -} - -func getEABKey(t *testing.T, client *api.Client, baseUrl string) (string, []byte) { - resp, err := client.Logical().WriteWithContext(ctx, path.Join("pki/", baseUrl, "/new-eab"), map[string]interface{}{}) - require.NoError(t, err, "failed getting eab key") - require.NotNil(t, resp, "eab key returned nil response") - require.NotEmpty(t, resp.Data["id"], "eab key response missing id field") - kid := resp.Data["id"].(string) - - require.NotEmpty(t, resp.Data["key"], "eab key response missing private_key field") - base64Key := resp.Data["key"].(string) - require.True(t, strings.HasPrefix(base64Key, "vault-eab-0-"), "%s should have had a prefix of vault-eab-0-", base64Key) - privateKeyBytes, err := base64.RawURLEncoding.DecodeString(base64Key) - require.NoError(t, err, "failed base 64 decoding eab key response") - - require.Equal(t, "hs", resp.Data["key_type"], "eab key_type field mis-match") - require.Equal(t, baseUrl+"directory", resp.Data["acme_directory"], "eab acme_directory field mis-match") - require.NotEmpty(t, resp.Data["created_on"], "empty created_on field") - _, err = time.Parse(time.RFC3339, resp.Data["created_on"].(string)) - require.NoError(t, err, "failed parsing eab created_on field") - - return kid, privateKeyBytes -} - -func TestACMEClientRequestLimits(t *testing.T) { - cluster, client, _ := setupAcmeBackend(t) - defer 
cluster.Cleanup() - - cases := []struct { - name string - authorizations []acme.AuthzID - requestCSR x509.CertificateRequest - valid bool - }{ - { - "validate-only-cn", - []acme.AuthzID{ - {"dns", "localhost"}, - }, - x509.CertificateRequest{ - Subject: pkix.Name{CommonName: "localhost"}, - }, - true, - }, - { - "validate-only-san", - []acme.AuthzID{ - {"dns", "localhost"}, - }, - x509.CertificateRequest{ - DNSNames: []string{"localhost"}, - }, - true, - }, - { - "validate-only-ip-address", - []acme.AuthzID{ - {"ip", "127.0.0.1"}, - }, - x509.CertificateRequest{ - IPAddresses: []net.IP{{127, 0, 0, 1}}, - }, - true, - }, - } - - testCtx := context.Background() - acmeConfig := map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "default_directory_policy": "sign-verbatim", - "dns_resolver": "", - "eab_policy_name": "", - } - _, err := client.Logical().WriteWithContext(testCtx, "pki/config/acme", acmeConfig) - require.NoError(t, err, "error configuring acme") - - for _, tc := range cases { - - // First Create Our Client - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - acmeClient := getAcmeClientForCluster(t, cluster, "/v1/pki/acme/", accountKey) - - discovery, err := acmeClient.Discover(testCtx) - require.NoError(t, err, "failed acme discovery call") - t.Logf("%v", discovery) - - acct, err := acmeClient.Register(testCtx, &acme.Account{ - Contact: []string{"mailto:test@example.com"}, - }, func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - require.Equal(t, acme.StatusValid, acct.Status) - require.Contains(t, acct.Contact, "mailto:test@example.com") - require.Len(t, acct.Contact, 1) - - // Create an order - t.Logf("Testing Authorize Order on %s", "pki/acme") - identifiers := make([]string, len(tc.authorizations)) - for index, auth := range tc.authorizations { - identifiers[index] = auth.Value - } - - createOrder, err := acmeClient.AuthorizeOrder(testCtx, tc.authorizations) - require.NoError(t, err, "failed creating order") - require.Equal(t, acme.StatusPending, createOrder.Status) - require.Empty(t, createOrder.CertURL) - require.Equal(t, createOrder.URI+"/finalize", createOrder.FinalizeURL) - require.Len(t, createOrder.AuthzURLs, len(tc.authorizations), "expected same number of authzurls as identifiers") - - // HACK: Update authorization/challenge to completed as we can't really do it properly in this workflow - // test. 
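// Illustrative sketch, not part of this patch: outside of this unit-test hack, an
// HTTP-01 challenge would be satisfied for real. A hedged sketch of that flow with
// golang.org/x/crypto/acme follows, assuming an already-registered client and an
// order from AuthorizeOrder; the :80 listener and error handling are simplified.
package pkisketches

import (
	"context"
	"fmt"
	"net/http"

	"golang.org/x/crypto/acme"
)

// fulfillHTTP01 serves the key authorization for each HTTP-01 challenge, accepts
// the challenge, and waits for the authorization to become valid.
func fulfillHTTP01(ctx context.Context, client *acme.Client, order *acme.Order) error {
	mux := http.NewServeMux()
	srv := &http.Server{Addr: ":80", Handler: mux} // ACME HTTP-01 validates on port 80
	go srv.ListenAndServe()                        // sketch only; check the error in real code
	defer srv.Close()

	for _, authzURL := range order.AuthzURLs {
		authz, err := client.GetAuthorization(ctx, authzURL)
		if err != nil {
			return err
		}
		for _, chal := range authz.Challenges {
			if chal.Type != "http-01" {
				continue
			}
			keyAuth, err := client.HTTP01ChallengeResponse(chal.Token)
			if err != nil {
				return err
			}
			// Serve the key authorization where the validator will look for it.
			mux.HandleFunc(client.HTTP01ChallengePath(chal.Token),
				func(w http.ResponseWriter, r *http.Request) {
					fmt.Fprint(w, keyAuth)
				})
			if _, err := client.Accept(ctx, chal); err != nil {
				return err
			}
			break
		}
		if _, err := client.WaitAuthorization(ctx, authzURL); err != nil {
			return err
		}
	}
	return nil
}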
- markAuthorizationSuccess(t, client, acmeClient, acct, createOrder) - - // Submit the CSR - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - require.NoError(t, err, "failed generated key for CSR") - csr, err := x509.CreateCertificateRequest(rand.Reader, &tc.requestCSR, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, createOrder.FinalizeURL, csr, true) - - if tc.valid { - require.NoError(t, err, "failed finalizing order") - - // Validate we get a signed cert back - testAcmeCertSignedByCa(t, client, certs, "int-ca") - } else { - require.Error(t, err, "Not a valid CSR, should err") - } - } -} diff --git a/builtin/logical/pki/path_config_acme.go b/builtin/logical/pki/path_config_acme.go deleted file mode 100644 index 6b1b78bc37fe0d..00000000000000 --- a/builtin/logical/pki/path_config_acme.go +++ /dev/null @@ -1,372 +0,0 @@ -package pki - -import ( - "context" - "fmt" - "net" - "os" - "strconv" - "strings" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/helper/errutil" - "github.com/hashicorp/vault/sdk/logical" -) - -const ( - storageAcmeConfig = "config/acme" - pathConfigAcmeHelpSyn = "Configuration of ACME Endpoints" - pathConfigAcmeHelpDesc = "Here we configure:\n\nenabled=false, whether ACME is enabled, defaults to false meaning that clusters will by default not get ACME support,\nallowed_issuers=\"default\", which issuers are allowed for use with ACME; by default, this will only be the primary (default) issuer,\nallowed_roles=\"*\", which roles are allowed for use with ACME; by default these will be all roles matching our selection criteria,\ndefault_directory_policy=\"\", either \"forbid\", preventing the default directory from being used at all, \"role:\" which is the role to be used for non-role-qualified ACME requests; or \"sign-verbatim\", the default meaning ACME issuance will be equivalent to sign-verbatim.,\ndns_resolver=\"\", which specifies a custom DNS resolver to use for all ACME-related DNS lookups" - disableAcmeEnvVar = "VAULT_DISABLE_PUBLIC_ACME" -) - -type acmeConfigEntry struct { - Enabled bool `json:"enabled"` - AllowedIssuers []string `json:"allowed_issuers="` - AllowedRoles []string `json:"allowed_roles"` - DefaultDirectoryPolicy string `json:"default_directory_policy"` - DNSResolver string `json:"dns_resolver"` - EabPolicyName EabPolicyName `json:"eab_policy_name"` -} - -var defaultAcmeConfig = acmeConfigEntry{ - Enabled: false, - AllowedIssuers: []string{"*"}, - AllowedRoles: []string{"*"}, - DefaultDirectoryPolicy: "sign-verbatim", - DNSResolver: "", - EabPolicyName: eabPolicyNotRequired, -} - -func (sc *storageContext) getAcmeConfig() (*acmeConfigEntry, error) { - entry, err := sc.Storage.Get(sc.Context, storageAcmeConfig) - if err != nil { - return nil, err - } - - var mapping acmeConfigEntry - if entry == nil { - mapping = defaultAcmeConfig - return &mapping, nil - } - - if err := entry.DecodeJSON(&mapping); err != nil { - return nil, errutil.InternalError{Err: fmt.Sprintf("unable to decode ACME configuration: %v", err)} - } - - return &mapping, nil -} - -func (sc *storageContext) setAcmeConfig(entry *acmeConfigEntry) error { - json, err := logical.StorageEntryJSON(storageAcmeConfig, entry) - if err != nil { - return fmt.Errorf("failed creating storage entry: %w", err) - } - - if err := sc.Storage.Put(sc.Context, json); err != nil { - return fmt.Errorf("failed writing storage entry: %w", err) - } - - sc.Backend.acmeState.markConfigDirty() - return 
nil -} - -func pathAcmeConfig(b *backend) *framework.Path { - return &framework.Path{ - Pattern: "config/acme", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - - Fields: map[string]*framework.FieldSchema{ - "enabled": { - Type: framework.TypeBool, - Description: `whether ACME is enabled, defaults to false meaning that clusters will by default not get ACME support`, - Default: false, - }, - "allowed_issuers": { - Type: framework.TypeCommaStringSlice, - Description: `which issuers are allowed for use with ACME; by default, this will only be the primary (default) issuer`, - Default: []string{"*"}, - }, - "allowed_roles": { - Type: framework.TypeCommaStringSlice, - Description: `which roles are allowed for use with ACME; by default via '*', these will be all roles including sign-verbatim; when concrete role names are specified, any default_directory_policy role must be included to allow usage of the default acme directories under /pki/acme/directory and /pki/issuer/:issuer_id/acme/directory.`, - Default: []string{"*"}, - }, - "default_directory_policy": { - Type: framework.TypeString, - Description: `the policy to be used for non-role-qualified ACME requests; by default ACME issuance will be otherwise unrestricted, equivalent to the sign-verbatim endpoint; one may also specify a role to use as this policy, as "role:", the specified role must be allowed by allowed_roles`, - Default: "sign-verbatim", - }, - "dns_resolver": { - Type: framework.TypeString, - Description: `DNS resolver to use for domain resolution on this mount. Defaults to using the default system resolver. Must be in the format :, with both parts mandatory.`, - Default: "", - }, - "eab_policy": { - Type: framework.TypeString, - Description: `Specify the policy to use for external account binding behaviour, 'not-required', 'new-account-required' or 'always-required'`, - Default: "always-required", - }, - }, - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "acme-configuration", - }, - Callback: b.pathAcmeRead, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathAcmeWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "acme", - }, - // Read more about why these flags are set in backend.go. 
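// Illustrative sketch, not part of this patch: enabling ACME on a mount over the API
// using the fields defined above. Assumes VAULT_ADDR/VAULT_TOKEN are set, the mount is
// at pki/, and config/cluster's path has already been configured (the read handler
// above warns when it has not); the values are examples only.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	client, err := api.NewClient(api.DefaultConfig()) // reads VAULT_ADDR / VAULT_TOKEN
	if err != nil {
		log.Fatal(err)
	}

	// Field names mirror the config/acme schema above.
	_, err = client.Logical().Write("pki/config/acme", map[string]interface{}{
		"enabled":                  true,
		"allowed_issuers":          "*",
		"allowed_roles":            "*",
		"default_directory_policy": "sign-verbatim",
		"dns_resolver":             "",             // empty: use the system resolver
		"eab_policy":               "not-required", // or new-account-required / always-required
	})
	if err != nil {
		log.Fatal(err)
	}

	cfg, err := client.Logical().Read("pki/config/acme")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("ACME enabled:", cfg.Data["enabled"])
}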
- ForwardPerformanceStandby: true, - ForwardPerformanceSecondary: true, - }, - }, - - HelpSynopsis: pathConfigAcmeHelpSyn, - HelpDescription: pathConfigAcmeHelpDesc, - } -} - -func (b *backend) pathAcmeRead(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) { - sc := b.makeStorageContext(ctx, req.Storage) - config, err := sc.getAcmeConfig() - if err != nil { - return nil, err - } - - var warnings []string - if config.Enabled { - _, err := getBasePathFromClusterConfig(sc) - if err != nil { - warnings = append(warnings, err.Error()) - } - } - - return genResponseFromAcmeConfig(config, warnings), nil -} - -func genResponseFromAcmeConfig(config *acmeConfigEntry, warnings []string) *logical.Response { - response := &logical.Response{ - Data: map[string]interface{}{ - "allowed_roles": config.AllowedRoles, - "allowed_issuers": config.AllowedIssuers, - "default_directory_policy": config.DefaultDirectoryPolicy, - "enabled": config.Enabled, - "dns_resolver": config.DNSResolver, - "eab_policy": config.EabPolicyName, - }, - Warnings: warnings, - } - - // TODO: Add some nice warning if we are on a replication cluster and path isn't set - - return response -} - -func (b *backend) pathAcmeWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) { - sc := b.makeStorageContext(ctx, req.Storage) - - config, err := sc.getAcmeConfig() - if err != nil { - return nil, err - } - - if enabledRaw, ok := d.GetOk("enabled"); ok { - config.Enabled = enabledRaw.(bool) - } - - if allowedRolesRaw, ok := d.GetOk("allowed_roles"); ok { - config.AllowedRoles = allowedRolesRaw.([]string) - if len(config.AllowedRoles) == 0 { - return nil, fmt.Errorf("allowed_roles must take a non-zero length value; specify '*' as the value to allow anything or specify enabled=false to disable ACME entirely") - } - } - - if defaultDirectoryPolicyRaw, ok := d.GetOk("default_directory_policy"); ok { - config.DefaultDirectoryPolicy = defaultDirectoryPolicyRaw.(string) - } - - if allowedIssuersRaw, ok := d.GetOk("allowed_issuers"); ok { - config.AllowedIssuers = allowedIssuersRaw.([]string) - if len(config.AllowedIssuers) == 0 { - return nil, fmt.Errorf("allowed_issuers must take a non-zero length value; specify '*' as the value to allow anything or specify enabled=false to disable ACME entirely") - } - } - - if dnsResolverRaw, ok := d.GetOk("dns_resolver"); ok { - config.DNSResolver = dnsResolverRaw.(string) - if config.DNSResolver != "" { - addr, _, err := net.SplitHostPort(config.DNSResolver) - if err != nil { - return nil, fmt.Errorf("failed to parse DNS resolver address: %w", err) - } - if addr == "" { - return nil, fmt.Errorf("failed to parse DNS resolver address: got empty address") - } - if net.ParseIP(addr) == nil { - return nil, fmt.Errorf("failed to parse DNS resolver address: expected IPv4/IPv6 address, likely got hostname") - } - } - } - - if eabPolicyRaw, ok := d.GetOk("eab_policy"); ok { - eabPolicy, err := getEabPolicyByString(eabPolicyRaw.(string)) - if err != nil { - return nil, fmt.Errorf("invalid eab policy name provided, valid values are '%s', '%s', '%s'", - eabPolicyNotRequired, eabPolicyNewAccountRequired, eabPolicyAlwaysRequired) - } - config.EabPolicyName = eabPolicy.Name - } - - // Validate Default Directory Behavior: - defaultDirectoryPolicyType, err := getDefaultDirectoryPolicyType(config.DefaultDirectoryPolicy) - if err != nil { - return nil, fmt.Errorf("invalid default_directory_policy: %w", err) - } - defaultDirectoryRoleName := "" 
- switch defaultDirectoryPolicyType { - case Forbid: - case SignVerbatim: - case Role: - defaultDirectoryRoleName, err = getDefaultDirectoryPolicyRole(config.DefaultDirectoryPolicy) - if err != nil { - return nil, fmt.Errorf("failed extracting role name from default directory policy %w", err) - } - - _, err := getAndValidateAcmeRole(sc, defaultDirectoryRoleName) - if err != nil { - return nil, fmt.Errorf("default directory policy role %v is not a valid ACME role: %w", defaultDirectoryRoleName, err) - } - default: - return nil, fmt.Errorf("validation for the type of policy defined by %v is undefined", config.DefaultDirectoryPolicy) - } - - // Validate Allowed Roles - allowAnyRole := len(config.AllowedRoles) == 1 && config.AllowedRoles[0] == "*" - foundDefault := false - if !allowAnyRole { - for index, name := range config.AllowedRoles { - if name == "*" { - return nil, fmt.Errorf("cannot use '*' as role name at index %d", index) - } - - _, err := getAndValidateAcmeRole(sc, name) - if err != nil { - return nil, fmt.Errorf("allowed_role %v is not a valid acme role: %w", name, err) - } - - if defaultDirectoryPolicyType == Role && name == defaultDirectoryRoleName { - foundDefault = true - } - } - - if !foundDefault && defaultDirectoryPolicyType == Role { - return nil, fmt.Errorf("default directory policy %v was not specified in allowed_roles: %v", config.DefaultDirectoryPolicy, config.AllowedRoles) - } - } - - allowAnyIssuer := len(config.AllowedIssuers) == 1 && config.AllowedIssuers[0] == "*" - if !allowAnyIssuer { - for index, name := range config.AllowedIssuers { - if name == "*" { - return nil, fmt.Errorf("cannot use '*' as issuer name at index %d", index) - } - - _, err := sc.resolveIssuerReference(name) - if err != nil { - return nil, fmt.Errorf("failed validating allowed_issuers: unable to fetch issuer: %v: %w", name, err) - } - } - } - - // Check to make sure that we have a proper value for the cluster path which ACME requires - if config.Enabled { - _, err = getBasePathFromClusterConfig(sc) - if err != nil { - return nil, err - } - } - - var warnings []string - // Lastly lets verify that the configuration is honored/invalidated by the public ACME env var. - isPublicAcmeDisabledByEnv, err := isPublicACMEDisabledByEnv() - if err != nil { - warnings = append(warnings, err.Error()) - } - if isPublicAcmeDisabledByEnv && config.Enabled { - eabPolicy := getEabPolicyByName(config.EabPolicyName) - if !eabPolicy.OverrideEnvDisablingPublicAcme() { - resp := logical.ErrorResponse("%s env var is enabled, ACME EAB policy needs to be '%s' with ACME enabled", - disableAcmeEnvVar, eabPolicyAlwaysRequired) - resp.Warnings = warnings - return resp, nil - } - } - - err = sc.setAcmeConfig(config) - if err != nil { - return nil, err - } - - return genResponseFromAcmeConfig(config, warnings), nil -} - -func isPublicACMEDisabledByEnv() (bool, error) { - disableAcmeRaw, ok := os.LookupEnv(disableAcmeEnvVar) - if !ok { - return false, nil - } - - disableAcme, err := strconv.ParseBool(disableAcmeRaw) - if err != nil { - // So the environment variable was set but we couldn't parse the value as a string, assume - // the operator wanted public ACME disabled. 
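// Illustrative sketch, not part of this patch: when EAB is enforced (eab_policy
// always-required, or forced by VAULT_DISABLE_PUBLIC_ACME as above), a client first
// fetches a binding key from Vault and presents it at registration. Endpoint and
// response fields follow the getEABKey test helper earlier in this patch; the
// directory URL is an example address.
package main

import (
	"context"
	"crypto/rand"
	"crypto/rsa"
	"encoding/base64"
	"log"

	"github.com/hashicorp/vault/api"
	"golang.org/x/crypto/acme"
)

func main() {
	ctx := context.Background()

	vault, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Ask Vault for an external-account-binding key for the default directory.
	resp, err := vault.Logical().WriteWithContext(ctx, "pki/acme/new-eab", map[string]interface{}{})
	if err != nil {
		log.Fatal(err)
	}
	kid := resp.Data["id"].(string)
	hmacKey, err := base64.RawURLEncoding.DecodeString(resp.Data["key"].(string))
	if err != nil {
		log.Fatal(err)
	}

	accountKey, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		log.Fatal(err)
	}
	acmeClient := &acme.Client{
		Key:          accountKey,
		DirectoryURL: "https://vault.example.com:8200/v1/pki/acme/directory", // example only
	}

	// Register, presenting the binding so an always-required policy is satisfied.
	_, err = acmeClient.Register(ctx, &acme.Account{
		ExternalAccountBinding: &acme.ExternalAccountBinding{KID: kid, Key: hmacKey},
	}, acme.AcceptTOS)
	if err != nil {
		log.Fatal(err)
	}
	log.Println("registered ACME account with EAB key", kid)
}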
- return true, fmt.Errorf("failed parsing environment variable %s: %w", disableAcmeEnvVar, err) - } - - return disableAcme, nil -} - -func getDefaultDirectoryPolicyType(defaultDirectoryPolicy string) (DefaultDirectoryPolicyType, error) { - switch { - case defaultDirectoryPolicy == "forbid": - return Forbid, nil - case defaultDirectoryPolicy == "sign-verbatim": - return SignVerbatim, nil - case strings.HasPrefix(defaultDirectoryPolicy, "role:"): - if len(defaultDirectoryPolicy) == 5 { - return Forbid, fmt.Errorf("no role specified by policy %v", defaultDirectoryPolicy) - } - return Role, nil - default: - return Forbid, fmt.Errorf("string %v not a valid Default Directory Policy", defaultDirectoryPolicy) - } -} - -func getDefaultDirectoryPolicyRole(defaultDirectoryPolicy string) (string, error) { - policyType, err := getDefaultDirectoryPolicyType(defaultDirectoryPolicy) - if err != nil { - return "", err - } - if policyType != Role { - return "", fmt.Errorf("default directory policy %v is not a role-based-policy", defaultDirectoryPolicy) - } - return defaultDirectoryPolicy[5:], nil -} - -type DefaultDirectoryPolicyType int - -const ( - Forbid DefaultDirectoryPolicyType = iota - SignVerbatim - Role -) diff --git a/builtin/logical/pki/path_config_acme_test.go b/builtin/logical/pki/path_config_acme_test.go deleted file mode 100644 index 93be569d667d40..00000000000000 --- a/builtin/logical/pki/path_config_acme_test.go +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pki - -import ( - "context" - "crypto/rand" - "crypto/rsa" - "testing" - "time" - - "github.com/stretchr/testify/require" -) - -func TestAcmeConfig(t *testing.T) { - cluster, client, _ := setupAcmeBackend(t) - defer cluster.Cleanup() - - cases := []struct { - name string - AcmeConfig map[string]interface{} - prefixUrl string - validConfig bool - works bool - }{ - {"unspecified-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", true, true}, - {"bad-policy-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "default_directory_policy": "bad", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", false, false}, - {"forbid-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "default_directory_policy": "forbid", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", true, false}, - {"sign-verbatim-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "default_directory_policy": "sign-verbatim", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", true, true}, - {"role-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "default_directory_policy": "role:exists", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", true, true}, - {"bad-role-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "*", - "default_directory_policy": "role:notgood", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", false, true}, - {"disallowed-role-root", map[string]interface{}{ - "enabled": true, - "allowed_issuers": "*", - "allowed_roles": "good", - "default_directory_policy": "role:exists", - "dns_resolver": "", - "eab_policy_name": "", - }, "acme/", false, false}, - } - - roleConfig := map[string]interface{}{ - 
"issuer_ref": "default", - "allowed_domains": "example.com", - "allow_subdomains": true, - "max_ttl": "720h", - } - - testCtx := context.Background() - - for _, tc := range cases { - deadline := time.Now().Add(time.Second * 10) - subTestCtx, _ := context.WithDeadline(testCtx, deadline) - - _, err := client.Logical().WriteWithContext(subTestCtx, "pki/roles/exists", roleConfig) - require.NoError(t, err) - _, err = client.Logical().WriteWithContext(subTestCtx, "pki/roles/good", roleConfig) - require.NoError(t, err) - - t.Run(tc.name, func(t *testing.T) { - _, err := client.Logical().WriteWithContext(subTestCtx, "pki/config/acme", tc.AcmeConfig) - - if tc.validConfig { - require.NoError(t, err) - } else { - require.Error(t, err) - return - } - - _, err = client.Logical().ReadWithContext(subTestCtx, "pki/acme/directory") - if tc.works { - require.NoError(t, err) - - baseAcmeURL := "/v1/pki/" + tc.prefixUrl - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa key") - - acmeClient := getAcmeClientForCluster(t, cluster, baseAcmeURL, accountKey) - - // Create new account - _, err = acmeClient.Discover(subTestCtx) - require.NoError(t, err, "failed acme discovery call") - } else { - require.Error(t, err, "Acme Configuration should prevent usage") - } - - t.Logf("Completed case %v", tc.name) - }) - } -} diff --git a/builtin/logical/pki/path_config_ca.go b/builtin/logical/pki/path_config_ca.go index e77386f4b18760..2399db4e5ee0d2 100644 --- a/builtin/logical/pki/path_config_ca.go +++ b/builtin/logical/pki/path_config_ca.go @@ -1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" - "net/http" "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/logical" @@ -14,13 +10,6 @@ import ( func pathConfigCA(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/ca", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "configure", - OperationSuffix: "ca", - }, - Fields: map[string]*framework.FieldSchema{ "pem_bundle": { Type: framework.TypeString, @@ -32,38 +21,6 @@ secret key and certificate.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathImportIssuers, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "mapping": { - Type: framework.TypeMap, - Description: "A mapping of issuer_id to key_id for all issuers included in this request", - Required: true, - }, - "imported_keys": { - Type: framework.TypeCommaStringSlice, - Description: "Net-new keys imported as a part of this request", - Required: true, - }, - "imported_issuers": { - Type: framework.TypeCommaStringSlice, - Description: "Net-new issuers imported as a part of this request", - Required: true, - }, - "existing_keys": { - Type: framework.TypeCommaStringSlice, - Description: "Existing keys specified as part of the import bundle of this request", - Required: true, - }, - "existing_issuers": { - Type: framework.TypeCommaStringSlice, - Description: "Existing issuers specified as part of the import bundle of this request", - Required: true, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -90,11 +47,6 @@ For security reasons, the secret key cannot be retrieved later. 
func pathConfigIssuers(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/issuers", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - Fields: map[string]*framework.FieldSchema{ defaultRef: { Type: framework.TypeString, @@ -106,51 +58,13 @@ func pathConfigIssuers(b *backend) *framework.Path { Default: false, }, }, + Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathCAIssuersRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "issuers-configuration", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "default": { - Type: framework.TypeString, - Description: `Reference (name or identifier) to the default issuer.`, - Required: true, - }, - "default_follows_latest_issuer": { - Type: framework.TypeBool, - Description: `Whether the default issuer should automatically follow the latest generated or imported issuer. Defaults to false.`, - Required: true, - }, - }, - }}, - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathCAIssuersWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "issuers", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "default": { - Type: framework.TypeString, - Description: `Reference (name or identifier) to the default issuer.`, - }, - "default_follows_latest_issuer": { - Type: framework.TypeBool, - Description: `Whether the default issuer should automatically follow the latest generated or imported issuer. Defaults to false.`, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -165,13 +79,6 @@ func pathConfigIssuers(b *backend) *framework.Path { func pathReplaceRoot(b *backend) *framework.Path { return &framework.Path{ Pattern: "root/replace", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "replace", - OperationSuffix: "root", - }, - Fields: map[string]*framework.FieldSchema{ "default": { Type: framework.TypeString, @@ -183,23 +90,6 @@ func pathReplaceRoot(b *backend) *framework.Path { Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathCAIssuersWrite, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "default": { - Type: framework.TypeString, - Description: `Reference (name or identifier) to the default issuer.`, - Required: true, - }, - "default_follows_latest_issuer": { - Type: framework.TypeBool, - Description: `Whether the default issuer should automatically follow the latest generated or imported issuer. Defaults to false.`, - Required: true, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -309,11 +199,6 @@ value of the issuer with the name "next", if it exists. 
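// Illustrative sketch, not part of this patch: pinning the default issuer through
// config/issuers and using the root/replace shortcut described above. Issuer names
// are examples; root/replace fails if the referenced issuer does not exist.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Pin the default issuer explicitly (by name or ID) and keep it from silently
	// following newly generated or imported issuers.
	if _, err := client.Logical().Write("pki/config/issuers", map[string]interface{}{
		"default":                       "int-ca",
		"default_follows_latest_issuer": false,
	}); err != nil {
		log.Fatal(err)
	}

	// Alternatively, root/replace swaps the default; with no argument it falls back
	// to an issuer literally named "next", per the help text above.
	if _, err := client.Logical().Write("pki/root/replace", map[string]interface{}{
		"default": "next",
	}); err != nil {
		log.Fatal(err)
	}

	cfg, err := client.Logical().Read("pki/config/issuers")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("default issuer is now:", cfg.Data["default"])
}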
func pathConfigKeys(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/keys", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - Fields: map[string]*framework.FieldSchema{ defaultRef: { Type: framework.TypeString, @@ -323,42 +208,12 @@ func pathConfigKeys(b *backend) *framework.Path { Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathKeyDefaultWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "keys", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "default": { - Type: framework.TypeString, - Description: `Reference (name or identifier) to the default issuer.`, - Required: true, - }, - }, - }}, - }, + Callback: b.pathKeyDefaultWrite, ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathKeyDefaultRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "keys-configuration", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "default": { - Type: framework.TypeString, - Description: `Reference (name or identifier) to the default issuer.`, - }, - }, - }}, - }, + Callback: b.pathKeyDefaultRead, ForwardPerformanceStandby: false, ForwardPerformanceSecondary: false, }, diff --git a/builtin/logical/pki/path_config_cluster.go b/builtin/logical/pki/path_config_cluster.go index 4bdfb820576ade..c887db7b7f6dbf 100644 --- a/builtin/logical/pki/path_config_cluster.go +++ b/builtin/logical/pki/path_config_cluster.go @@ -1,12 +1,8 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" "fmt" - "net/http" "github.com/asaskevich/govalidator" "github.com/hashicorp/vault/sdk/framework" @@ -16,11 +12,6 @@ import ( func pathConfigCluster(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/cluster", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - Fields: map[string]*framework.FieldSchema{ "path": { Type: framework.TypeString, @@ -49,78 +40,10 @@ For example: http://cdn.example.com/pr1/pki`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "cluster", - }, Callback: b.pathWriteCluster, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "path": { - Type: framework.TypeString, - Description: `Canonical URI to this mount on this performance -replication cluster's external address. This is for resolving AIA URLs and -providing the {{cluster_path}} template parameter but might be used for other -purposes in the future. - -This should only point back to this particular PR replica and should not ever -point to another PR cluster. It may point to any node in the PR replica, -including standby nodes, and need not always point to the active node. - -For example: https://pr1.vault.example.com:8200/v1/pki`, - }, - "aia_path": { - Type: framework.TypeString, - Description: `Optional URI to this mount's AIA distribution -point; may refer to an external non-Vault responder. 
This is for resolving AIA -URLs and providing the {{cluster_aia_path}} template parameter and will not -be used for other purposes. As such, unlike path above, this could safely -be an insecure transit mechanism (like HTTP without TLS). - -For example: http://cdn.example.com/pr1/pki`, - }, - }, - }}, - }, }, logical.ReadOperation: &framework.PathOperation{ Callback: b.pathReadCluster, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "cluster-configuration", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "path": { - Type: framework.TypeString, - Description: `Canonical URI to this mount on this performance -replication cluster's external address. This is for resolving AIA URLs and -providing the {{cluster_path}} template parameter but might be used for other -purposes in the future. - -This should only point back to this particular PR replica and should not ever -point to another PR cluster. It may point to any node in the PR replica, -including standby nodes, and need not always point to the active node. - -For example: https://pr1.vault.example.com:8200/v1/pki`, - Required: true, - }, - "aia_path": { - Type: framework.TypeString, - Description: `Optional URI to this mount's AIA distribution -point; may refer to an external non-Vault responder. This is for resolving AIA -URLs and providing the {{cluster_aia_path}} template parameter and will not -be used for other purposes. As such, unlike path above, this could safely -be an insecure transit mechanism (like HTTP without TLS). - -For example: http://cdn.example.com/pr1/pki`, - }, - }, - }}, - }, }, }, @@ -155,9 +78,6 @@ func (b *backend) pathWriteCluster(ctx context.Context, req *logical.Request, da if value, ok := data.GetOk("path"); ok { cfg.Path = value.(string) - - // This field is required by ACME, if ever we allow un-setting in the - // future, this code will need to verify that ACME is not enabled. if !govalidator.IsURL(cfg.Path) { return nil, fmt.Errorf("invalid, non-URL path given to cluster: %v", cfg.Path) } diff --git a/builtin/logical/pki/path_config_crl.go b/builtin/logical/pki/path_config_crl.go index 0249e6f084b70f..9cf57aa1e035aa 100644 --- a/builtin/logical/pki/path_config_crl.go +++ b/builtin/logical/pki/path_config_crl.go @@ -1,12 +1,8 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" "fmt" - "net/http" "time" "github.com/hashicorp/vault/helper/constants" @@ -52,11 +48,6 @@ var defaultCrlConfig = crlConfig{ func pathConfigCRL(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/crl", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - Fields: map[string]*framework.FieldSchema{ "expiry": { Type: framework.TypeString, @@ -118,149 +109,10 @@ existing CRL and OCSP paths will return the unified CRL instead of a response ba Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "crl-configuration", - }, Callback: b.pathCRLRead, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "expiry": { - Type: framework.TypeString, - Description: `The amount of time the generated CRL should be -valid; defaults to 72 hours`, - Required: true, - }, - "disable": { - Type: framework.TypeBool, - Description: `If set to true, disables generating the CRL entirely.`, - Required: true, - }, - "ocsp_disable": { - Type: framework.TypeBool, - Description: `If set to true, ocsp unauthorized responses will be returned.`, - Required: true, - }, - "ocsp_expiry": { - Type: framework.TypeString, - Description: `The amount of time an OCSP response will be valid (controls -the NextUpdate field); defaults to 12 hours`, - Required: true, - }, - "auto_rebuild": { - Type: framework.TypeBool, - Description: `If set to true, enables automatic rebuilding of the CRL`, - Required: true, - }, - "auto_rebuild_grace_period": { - Type: framework.TypeString, - Description: `The time before the CRL expires to automatically rebuild it, when enabled. Must be shorter than the CRL expiry. Defaults to 12h.`, - Required: true, - }, - "enable_delta": { - Type: framework.TypeBool, - Description: `Whether to enable delta CRLs between authoritative CRL rebuilds`, - Required: true, - }, - "delta_rebuild_interval": { - Type: framework.TypeString, - Description: `The time between delta CRL rebuilds if a new revocation has occurred. Must be shorter than the CRL expiry. Defaults to 15m.`, - Required: true, - }, - "cross_cluster_revocation": { - Type: framework.TypeBool, - Description: `Whether to enable a global, cross-cluster revocation queue. -Must be used with auto_rebuild=true.`, - Required: true, - }, - "unified_crl": { - Type: framework.TypeBool, - Description: `If set to true enables global replication of revocation entries, -also enabling unified versions of OCSP and CRLs if their respective features are enabled. 
-disable for CRLs and ocsp_disable for OCSP.`, - Required: true, - }, - "unified_crl_on_existing_paths": { - Type: framework.TypeBool, - Description: `If set to true, -existing CRL and OCSP paths will return the unified CRL instead of a response based on cluster-local data`, - Required: true, - }, - }, - }}, - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathCRLWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "crl", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "expiry": { - Type: framework.TypeString, - Description: `The amount of time the generated CRL should be -valid; defaults to 72 hours`, - Default: "72h", - }, - "disable": { - Type: framework.TypeBool, - Description: `If set to true, disables generating the CRL entirely.`, - }, - "ocsp_disable": { - Type: framework.TypeBool, - Description: `If set to true, ocsp unauthorized responses will be returned.`, - }, - "ocsp_expiry": { - Type: framework.TypeString, - Description: `The amount of time an OCSP response will be valid (controls -the NextUpdate field); defaults to 12 hours`, - Default: "1h", - }, - "auto_rebuild": { - Type: framework.TypeBool, - Description: `If set to true, enables automatic rebuilding of the CRL`, - }, - "auto_rebuild_grace_period": { - Type: framework.TypeString, - Description: `The time before the CRL expires to automatically rebuild it, when enabled. Must be shorter than the CRL expiry. Defaults to 12h.`, - Default: "12h", - }, - "enable_delta": { - Type: framework.TypeBool, - Description: `Whether to enable delta CRLs between authoritative CRL rebuilds`, - }, - "delta_rebuild_interval": { - Type: framework.TypeString, - Description: `The time between delta CRL rebuilds if a new revocation has occurred. Must be shorter than the CRL expiry. Defaults to 15m.`, - Default: "15m", - }, - "cross_cluster_revocation": { - Type: framework.TypeBool, - Description: `Whether to enable a global, cross-cluster revocation queue. -Must be used with auto_rebuild=true.`, - Required: false, - }, - "unified_crl": { - Type: framework.TypeBool, - Description: `If set to true enables global replication of revocation entries, -also enabling unified versions of OCSP and CRLs if their respective features are enabled. -disable for CRLs and ocsp_disable for OCSP.`, - Required: false, - }, - "unified_crl_on_existing_paths": { - Type: framework.TypeBool, - Description: `If set to true, -existing CRL and OCSP paths will return the unified CRL instead of a response based on cluster-local data`, - Required: false, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -421,8 +273,6 @@ func (b *backend) pathCRLWrite(ctx context.Context, req *logical.Request, d *fra b.crlBuilder.markConfigDirty() b.crlBuilder.reloadConfigIfRequired(sc) - resp := genResponseFromCrlConfig(config) - // Note this only affects/happens on the main cluster node, if you need to // notify something based on a configuration change on all server types // have a look at crlBuilder::reloadConfigIfRequired @@ -431,7 +281,7 @@ func (b *backend) pathCRLWrite(ctx context.Context, req *logical.Request, d *fra // auto-rebuild and we aren't now or equivalently, we changed our // mind about delta CRLs and need a new complete one or equivalently, // we changed our mind about unified CRLs), rotate the CRLs. 
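// Illustrative sketch, not part of this patch: an operator-side view of the rebuild
// behaviour described in the comment above: update config/crl, then force an
// immediate rebuild via the standard crl/rotate endpoint. Values are examples only.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Turn on auto-rebuild so the CRL is refreshed ahead of expiry
	// (field names from the config/crl schema above).
	if _, err := client.Logical().Write("pki/config/crl", map[string]interface{}{
		"expiry":                    "72h",
		"auto_rebuild":              true,
		"auto_rebuild_grace_period": "12h",
	}); err != nil {
		log.Fatal(err)
	}

	// Force an immediate rebuild rather than waiting for the background rotation.
	resp, err := client.Logical().Read("pki/crl/rotate")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("rotation success:", resp.Data["success"])
}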
- warnings, crlErr := b.crlBuilder.rebuild(sc, true) + crlErr := b.crlBuilder.rebuild(sc, true) if crlErr != nil { switch crlErr.(type) { case errutil.UserError: @@ -440,12 +290,9 @@ func (b *backend) pathCRLWrite(ctx context.Context, req *logical.Request, d *fra return nil, fmt.Errorf("error encountered during CRL building: %w", crlErr) } } - for index, warning := range warnings { - resp.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } } - return resp, nil + return genResponseFromCrlConfig(config), nil } func genResponseFromCrlConfig(config *crlConfig) *logical.Response { diff --git a/builtin/logical/pki/path_config_urls.go b/builtin/logical/pki/path_config_urls.go index 341f3db635573c..5c81f93367fbe6 100644 --- a/builtin/logical/pki/path_config_urls.go +++ b/builtin/logical/pki/path_config_urls.go @@ -1,12 +1,8 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" "fmt" - "net/http" "strings" "github.com/asaskevich/govalidator" @@ -17,11 +13,6 @@ import ( func pathConfigURLs(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/urls", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, - Fields: map[string]*framework.FieldSchema{ "issuing_certificates": { Type: framework.TypeCommaStringSlice, @@ -56,82 +47,10 @@ to be set on all PR secondary clusters.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "urls", - }, Callback: b.pathWriteURL, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "issuing_certificates": { - Type: framework.TypeCommaStringSlice, - Description: `Comma-separated list of URLs to be used -for the issuing certificate attribute. See also RFC 5280 Section 4.2.2.1.`, - }, - "crl_distribution_points": { - Type: framework.TypeCommaStringSlice, - Description: `Comma-separated list of URLs to be used -for the CRL distribution points attribute. See also RFC 5280 Section 4.2.1.13.`, - }, - "ocsp_servers": { - Type: framework.TypeCommaStringSlice, - Description: `Comma-separated list of URLs to be used -for the OCSP servers attribute. See also RFC 5280 Section 4.2.2.1.`, - }, - "enable_templating": { - Type: framework.TypeBool, - Description: `Whether or not to enabling templating of the -above AIA fields. When templating is enabled the special values '{{issuer_id}}' -and '{{cluster_path}}' are available, but the addresses are not checked for -URI validity until issuance time. This requires /config/cluster's path to be -set on all PR Secondary clusters.`, - Default: false, - }, - }, - }}, - }, }, logical.ReadOperation: &framework.PathOperation{ Callback: b.pathReadURL, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "urls-configuration", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "issuing_certificates": { - Type: framework.TypeCommaStringSlice, - Description: `Comma-separated list of URLs to be used -for the issuing certificate attribute. See also RFC 5280 Section 4.2.2.1.`, - Required: true, - }, - "crl_distribution_points": { - Type: framework.TypeCommaStringSlice, - Description: `Comma-separated list of URLs to be used -for the CRL distribution points attribute. 
See also RFC 5280 Section 4.2.1.13.`, - Required: true, - }, - "ocsp_servers": { - Type: framework.TypeCommaStringSlice, - Description: `Comma-separated list of URLs to be used -for the OCSP servers attribute. See also RFC 5280 Section 4.2.2.1.`, - Required: true, - }, - "enable_templating": { - Type: framework.TypeBool, - Description: `Whether or not to enable templating of the -above AIA fields. When templating is enabled the special values '{{issuer_id}}' -and '{{cluster_path}}' are available, but the addresses are not checked for -URI validity until issuance time. This requires /config/cluster's path to be -set on all PR Secondary clusters.`, - Required: true, - }, - }, - }}, - }, }, }, diff --git a/builtin/logical/pki/path_fetch.go b/builtin/logical/pki/path_fetch.go index 1798e705db88d4..c1a4f5be4736ab 100644 --- a/builtin/logical/pki/path_fetch.go +++ b/builtin/logical/pki/path_fetch.go @@ -1,13 +1,9 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" "encoding/pem" "fmt" - "net/http" "strings" "time" @@ -18,53 +14,14 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -var pathFetchReadSchema = map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: false, - }, - "revocation_time": { - Type: framework.TypeString, - Description: `Revocation time`, - Required: false, - }, - "revocation_time_rfc3339": { - Type: framework.TypeString, - Description: `Revocation time RFC 3339 formatted`, - Required: false, - }, - "issuer_id": { - Type: framework.TypeString, - Description: `ID of the issuer`, - Required: false, - }, - "ca_chain": { - Type: framework.TypeStringSlice, - Description: `Issuing CA Chain`, - Required: false, - }, - }, - }}, -} - // Returns the CA in raw format func pathFetchCA(b *backend) *framework.Path { return &framework.Path{ Pattern: `ca(/pem)?`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "ca-der|ca-pem", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathFetchRead, - Responses: pathFetchReadSchema, + Callback: b.pathFetchRead, }, }, @@ -78,15 +35,9 @@ func pathFetchCAChain(b *backend) *framework.Path { return &framework.Path{ Pattern: `(cert/)?ca_chain`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "ca-chain-pem|cert-ca-chain", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathFetchRead, - Responses: pathFetchReadSchema, + Callback: b.pathFetchRead, }, }, @@ -100,15 +51,9 @@ func pathFetchCRL(b *backend) *framework.Path { return &framework.Path{ Pattern: `crl(/pem|/delta(/pem)?)?`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "crl-der|crl-pem|crl-delta|crl-delta-pem", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathFetchRead, - Responses: pathFetchReadSchema, + Callback: b.pathFetchRead, }, }, @@ -122,11 +67,6 @@ func pathFetchUnifiedCRL(b *backend) *framework.Path { return &framework.Path{ Pattern: `unified-crl(/pem|/delta(/pem)?)?`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: 
"unified-crl-der|unified-crl-pem|unified-crl-delta|unified-crl-delta-pem", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathFetchRead, @@ -142,12 +82,6 @@ func pathFetchUnifiedCRL(b *backend) *framework.Path { func pathFetchValidRaw(b *backend) *framework.Path { return &framework.Path{ Pattern: `cert/(?P[0-9A-Fa-f-:]+)/raw(/pem)?`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "cert-raw-der|cert-raw-pem", - }, - Fields: map[string]*framework.FieldSchema{ "serial": { Type: framework.TypeString, @@ -158,8 +92,7 @@ hyphen-separated octal`, Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathFetchRead, - Responses: pathFetchReadSchema, + Callback: b.pathFetchRead, }, }, @@ -173,12 +106,6 @@ hyphen-separated octal`, func pathFetchValid(b *backend) *framework.Path { return &framework.Path{ Pattern: `cert/(?P[0-9A-Fa-f-:]+)`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "cert", - }, - Fields: map[string]*framework.FieldSchema{ "serial": { Type: framework.TypeString, @@ -189,8 +116,7 @@ hyphen-separated octal`, Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathFetchRead, - Responses: pathFetchReadSchema, + Callback: b.pathFetchRead, }, }, @@ -209,15 +135,9 @@ func pathFetchCRLViaCertPath(b *backend) *framework.Path { return &framework.Path{ Pattern: pattern, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "cert-crl|cert-delta-crl|cert-unified-crl|cert-unified-delta-crl", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathFetchRead, - Responses: pathFetchReadSchema, + Callback: b.pathFetchRead, }, }, @@ -231,26 +151,9 @@ func pathFetchListCerts(b *backend) *framework.Path { return &framework.Path{ Pattern: "certs/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "certs", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathFetchCertList, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "keys": { - Type: framework.TypeStringSlice, - Description: `A list of keys`, - Required: true, - }, - }, - }}, - }, }, }, diff --git a/builtin/logical/pki/path_fetch_issuers.go b/builtin/logical/pki/path_fetch_issuers.go index 48596bb5849c83..4a2054bd6ecd69 100644 --- a/builtin/logical/pki/path_fetch_issuers.go +++ b/builtin/logical/pki/path_fetch_issuers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -8,7 +5,6 @@ import ( "crypto/x509" "encoding/pem" "fmt" - "net/http" "strings" "time" @@ -21,31 +17,9 @@ func pathListIssuers(b *backend) *framework.Path { return &framework.Path{ Pattern: "issuers/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "issuers", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathListIssuersHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "keys": { - Type: framework.TypeStringSlice, - Description: `A list of keys`, - Required: true, - }, - "key_info": { - Type: framework.TypeMap, - Description: `Key info with issuer name`, - Required: false, - }, - }, - }}, - }, }, }, @@ -84,28 +58,8 @@ func (b *backend) pathListIssuersHandler(ctx context.Context, req *logical.Reque responseKeys = append(responseKeys, string(identifier)) responseInfo[string(identifier)] = map[string]interface{}{ - "issuer_name": issuer.Name, - "is_default": identifier == config.DefaultIssuerId, - "serial_number": issuer.SerialNumber, - - // While nominally this could be considered sensitive information - // to be returned on an unauthed endpoint, there's two mitigating - // circumstances: - // - // 1. Key IDs are purely random numbers generated by Vault and - // have no relationship to the actual key material. - // 2. They also don't _do_ anything by themselves. There is no - // modification of KeyIDs allowed, you need to be authenticated - // to Vault to understand what they mean, you _essentially_ - // get the same information from looking at/comparing various - // cert's SubjectPublicKeyInfo field, and there's the `default` - // reference that anyone with issuer generation capabilities - // can use even if they can't access any of the other /key/* - // endpoints. - // - // So all in all, exposing this value is not a security risk and - // is otherwise beneficial for the UI, hence its inclusion. - "key_id": issuer.KeyID, + "issuer_name": issuer.Name, + "is_default": identifier == config.DefaultIssuerId, } } @@ -122,27 +76,15 @@ their identifier and their name (if set). 
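// Illustrative sketch, not part of this patch: consuming the issuers list endpoint
// described above. Field names (keys, key_info, issuer_name, is_default) come from
// pathListIssuersHandler; assumes at least one issuer exists, and type assertions are
// left unchecked for brevity.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	resp, err := client.Logical().List("pki/issuers")
	if err != nil {
		log.Fatal(err)
	}

	// "keys" holds the issuer identifiers; "key_info" carries the per-issuer
	// details built by the handler above.
	keyInfo := resp.Data["key_info"].(map[string]interface{})
	for _, raw := range resp.Data["keys"].([]interface{}) {
		id := raw.(string)
		info := keyInfo[id].(map[string]interface{})
		fmt.Printf("%s name=%v default=%v\n", id, info["issuer_name"], info["is_default"])
	}
}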
func pathGetIssuer(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "$" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "issuer", - } - - return buildPathIssuer(b, pattern, displayAttrs) + return buildPathIssuer(b, pattern) } func pathGetUnauthedIssuer(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/(json|der|pem)$" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "issuer-json|issuer-der|issuer-pem", - } - - return buildPathGetIssuer(b, pattern, displayAttrs) + return buildPathGetIssuer(b, pattern) } -func buildPathIssuer(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathIssuer(b *backend, pattern string) *framework.Path { fields := map[string]*framework.FieldSchema{} fields = addIssuerRefNameFields(fields) @@ -209,125 +151,29 @@ to be set on all PR secondary clusters.`, Default: false, } - updateIssuerSchema := map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "issuer_id": { - Type: framework.TypeString, - Description: `Issuer Id`, - Required: false, - }, - "issuer_name": { - Type: framework.TypeString, - Description: `Issuer Name`, - Required: false, - }, - "key_id": { - Type: framework.TypeString, - Description: `Key Id`, - Required: false, - }, - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: false, - }, - "manual_chain": { - Type: framework.TypeStringSlice, - Description: `Manual Chain`, - Required: false, - }, - "ca_chain": { - Type: framework.TypeStringSlice, - Description: `CA Chain`, - Required: false, - }, - "leaf_not_after_behavior": { - Type: framework.TypeString, - Description: `Leaf Not After Behavior`, - Required: false, - }, - "usage": { - Type: framework.TypeStringSlice, - Description: `Usage`, - Required: false, - }, - "revocation_signature_algorithm": { - Type: framework.TypeString, - Description: `Revocation Signature Alogrithm`, - Required: false, - }, - "revoked": { - Type: framework.TypeBool, - Description: `Revoked`, - Required: false, - }, - "revocation_time": { - Type: framework.TypeInt, - Required: false, - }, - "revocation_time_rfc3339": { - Type: framework.TypeString, - Required: false, - }, - "issuing_certificates": { - Type: framework.TypeStringSlice, - Description: `Issuing Certificates`, - Required: false, - }, - "crl_distribution_points": { - Type: framework.TypeStringSlice, - Description: `CRL Distribution Points`, - Required: false, - }, - "ocsp_servers": { - Type: framework.TypeStringSlice, - Description: `OSCP Servers`, - Required: false, - }, - "enable_aia_url_templating": { - Type: framework.TypeBool, - Description: `Whether or not templating is enabled for AIA fields`, - Required: false, - }, - }, - }}, - } - return &framework.Path{ // Returns a JSON entry. - Pattern: pattern, - DisplayAttrs: displayAttrs, - Fields: fields, + Pattern: pattern, + Fields: fields, Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathGetIssuer, - Responses: updateIssuerSchema, + Callback: b.pathGetIssuer, }, logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathUpdateIssuer, - Responses: updateIssuerSchema, - + Callback: b.pathUpdateIssuer, // Read more about why these flags are set in backend.go. 
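// Illustrative sketch, not part of this patch: reading and updating an issuer through
// issuer/:issuer_ref using fields from the response schema above. Values are examples;
// pathPatchIssuer above also supports partial (PATCH) updates.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Read the issuer first; field names match the schema above
	// (issuer_id, issuer_name, ca_chain, usage, ...).
	issuer, err := client.Logical().Read("pki/issuer/default")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("issuer_id:", issuer.Data["issuer_id"], "name:", issuer.Data["issuer_name"])

	// Update mutable properties of the same issuer.
	if _, err := client.Logical().Write("pki/issuer/default", map[string]interface{}{
		"issuer_name": "int-ca",
		"usage":       "read-only,issuing-certificates,crl-signing",
	}); err != nil {
		log.Fatal(err)
	}
}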
ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathDeleteIssuer, - Responses: map[int][]framework.Response{ - http.StatusNoContent: {{ - Description: "No Content", - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, logical.PatchOperation: &framework.PathOperation{ - Callback: b.pathPatchIssuer, - Responses: updateIssuerSchema, + Callback: b.pathPatchIssuer, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -339,51 +185,18 @@ to be set on all PR secondary clusters.`, } } -func buildPathGetIssuer(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathGetIssuer(b *backend, pattern string) *framework.Path { fields := map[string]*framework.FieldSchema{} fields = addIssuerRefField(fields) - getIssuerSchema := map[int][]framework.Response{ - http.StatusNotModified: {{ - Description: "Not Modified", - }}, - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "issuer_id": { - Type: framework.TypeString, - Description: `Issuer Id`, - Required: true, - }, - "issuer_name": { - Type: framework.TypeString, - Description: `Issuer Name`, - Required: true, - }, - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "ca_chain": { - Type: framework.TypeStringSlice, - Description: `CA Chain`, - Required: true, - }, - }, - }}, - } - return &framework.Path{ // Returns a JSON entry. - Pattern: pattern, - DisplayAttrs: displayAttrs, - Fields: fields, + Pattern: pattern, + Fields: fields, Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathGetIssuer, - Responses: getIssuerSchema, + Callback: b.pathGetIssuer, }, }, @@ -1155,50 +968,26 @@ the certificate. func pathGetIssuerCRL(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/crl(/pem|/der|/delta(/pem|/der)?)?" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationSuffix: "crl|crl-pem|crl-der|crl-delta|crl-delta-pem|crl-delta-der", - } - - return buildPathGetIssuerCRL(b, pattern, displayAttrs) + return buildPathGetIssuerCRL(b, pattern) } func pathGetIssuerUnifiedCRL(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/unified-crl(/pem|/der|/delta(/pem|/der)?)?" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationSuffix: "unified-crl|unified-crl-pem|unified-crl-der|unified-crl-delta|unified-crl-delta-pem|unified-crl-delta-der", - } - - return buildPathGetIssuerCRL(b, pattern, displayAttrs) + return buildPathGetIssuerCRL(b, pattern) } -func buildPathGetIssuerCRL(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathGetIssuerCRL(b *backend, pattern string) *framework.Path { fields := map[string]*framework.FieldSchema{} fields = addIssuerRefNameFields(fields) return &framework.Path{ // Returns raw values. 
- Pattern: pattern, - DisplayAttrs: displayAttrs, - Fields: fields, + Pattern: pattern, + Fields: fields, Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathGetIssuerCRL, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "crl": { - Type: framework.TypeString, - Required: false, - }, - }, - }}, - }, }, }, @@ -1218,20 +1007,9 @@ func (b *backend) pathGetIssuerCRL(ctx context.Context, req *logical.Request, da } sc := b.makeStorageContext(ctx, req.Storage) - warnings, err := b.crlBuilder.rebuildIfForced(sc) - if err != nil { + if err := b.crlBuilder.rebuildIfForced(sc); err != nil { return nil, err } - if len(warnings) > 0 { - // Since this is a fetch of a specific CRL, this most likely comes - // from an automated system of some sort; these warnings would be - // ignored and likely meaningless. Log them instead. - msg := "During rebuild of CRL on issuer CRL fetch, got the following warnings:" - for index, warning := range warnings { - msg = fmt.Sprintf("%v\n %d. %v", msg, index+1, warning) - } - b.Logger().Warn(msg) - } var certificate []byte var contentType string diff --git a/builtin/logical/pki/path_fetch_keys.go b/builtin/logical/pki/path_fetch_keys.go index 23b3bf58dcb22e..d0a01cb429b0ef 100644 --- a/builtin/logical/pki/path_fetch_keys.go +++ b/builtin/logical/pki/path_fetch_keys.go @@ -1,13 +1,9 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" "crypto" "fmt" - "net/http" "github.com/hashicorp/vault/sdk/helper/certutil" "github.com/hashicorp/vault/sdk/helper/errutil" @@ -20,31 +16,9 @@ func pathListKeys(b *backend) *framework.Path { return &framework.Path{ Pattern: "keys/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "keys", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ - Callback: b.pathListKeysHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "keys": { - Type: framework.TypeStringSlice, - Description: `A list of keys`, - Required: true, - }, - "key_info": { - Type: framework.TypeMap, - Description: `Key info with issuer name`, - Required: false, - }, - }, - }}, - }, + Callback: b.pathListKeysHandler, ForwardPerformanceStandby: false, ForwardPerformanceSecondary: false, }, @@ -98,19 +72,12 @@ func (b *backend) pathListKeysHandler(ctx context.Context, req *logical.Request, func pathKey(b *backend) *framework.Path { pattern := "key/" + framework.GenericNameRegex(keyRefParam) - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "key", - } - - return buildPathKey(b, pattern, displayAttrs) + return buildPathKey(b, pattern) } -func buildPathKey(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathKey(b *backend, pattern string) *framework.Path { return &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Fields: map[string]*framework.FieldSchema{ keyRefParam: { @@ -126,81 +93,17 @@ func buildPathKey(b *backend, pattern string, displayAttrs *framework.DisplayAtt Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathGetKeyHandler, - Responses: 
map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "key_id": { - Type: framework.TypeString, - Description: `Key Id`, - Required: true, - }, - "key_name": { - Type: framework.TypeString, - Description: `Key Name`, - Required: true, - }, - "key_type": { - Type: framework.TypeString, - Description: `Key Type`, - Required: true, - }, - "subject_key_id": { - Type: framework.TypeString, - Description: `RFC 5280 Subject Key Identifier of the public counterpart`, - Required: false, - }, - "managed_key_id": { - Type: framework.TypeString, - Description: `Managed Key Id`, - Required: false, - }, - "managed_key_name": { - Type: framework.TypeString, - Description: `Managed Key Name`, - Required: false, - }, - }, - }}, - }, + Callback: b.pathGetKeyHandler, ForwardPerformanceStandby: false, ForwardPerformanceSecondary: false, }, logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathUpdateKeyHandler, - Responses: map[int][]framework.Response{ - http.StatusNoContent: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "key_id": { - Type: framework.TypeString, - Description: `Key Id`, - Required: true, - }, - "key_name": { - Type: framework.TypeString, - Description: `Key Name`, - Required: true, - }, - "key_type": { - Type: framework.TypeString, - Description: `Key Type`, - Required: true, - }, - }, - }}, - }, + Callback: b.pathUpdateKeyHandler, ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathDeleteKeyHandler, - Responses: map[int][]framework.Response{ - http.StatusNoContent: {{ - Description: "No Content", - }}, - }, + Callback: b.pathDeleteKeyHandler, ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, diff --git a/builtin/logical/pki/path_intermediate.go b/builtin/logical/pki/path_intermediate.go index 1a685630e63bba..cfcff87b04f370 100644 --- a/builtin/logical/pki/path_intermediate.go +++ b/builtin/logical/pki/path_intermediate.go @@ -1,13 +1,9 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" "encoding/base64" "fmt" - "net/http" "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/helper/errutil" @@ -15,27 +11,13 @@ import ( ) func pathGenerateIntermediate(b *backend) *framework.Path { - pattern := "intermediate/generate/" + framework.GenericNameRegex("exported") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "generate", - OperationSuffix: "intermediate", - } - - return buildPathGenerateIntermediate(b, pattern, displayAttrs) + return buildPathGenerateIntermediate(b, "intermediate/generate/"+framework.GenericNameRegex("exported")) } func pathSetSignedIntermediate(b *backend) *framework.Path { ret := &framework.Path{ Pattern: "intermediate/set-signed", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "set-signed", - OperationSuffix: "intermediate", - }, - Fields: map[string]*framework.FieldSchema{ "certificate": { Type: framework.TypeString, @@ -49,38 +31,6 @@ appended to the bundle.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathImportIssuers, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "mapping": { - Type: framework.TypeMap, - Description: "A mapping of issuer_id to key_id for all issuers included in this request", - Required: true, - }, - "imported_keys": { - Type: framework.TypeCommaStringSlice, - Description: "Net-new keys imported as a part of this request", - Required: true, - }, - "imported_issuers": { - Type: framework.TypeCommaStringSlice, - Description: "Net-new issuers imported as a part of this request", - Required: true, - }, - "existing_keys": { - Type: framework.TypeCommaStringSlice, - Description: "Existing keys specified as part of the import bundle of this request", - Required: true, - }, - "existing_issuers": { - Type: framework.TypeCommaStringSlice, - Description: "Existing issuers specified as part of the import bundle of this request", - Required: true, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, diff --git a/builtin/logical/pki/path_issue_sign.go b/builtin/logical/pki/path_issue_sign.go index 388e80cd21a786..ccb1251b63e518 100644 --- a/builtin/logical/pki/path_issue_sign.go +++ b/builtin/logical/pki/path_issue_sign.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -10,7 +7,6 @@ import ( "encoding/base64" "encoding/pem" "fmt" - "net/http" "strings" "time" @@ -23,78 +19,21 @@ import ( func pathIssue(b *backend) *framework.Path { pattern := "issue/" + framework.GenericNameRegex("role") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "issue", - OperationSuffix: "with-role", - } - - return buildPathIssue(b, pattern, displayAttrs) + return buildPathIssue(b, pattern) } func pathIssuerIssue(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/issue/" + framework.GenericNameRegex("role") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "issue", - OperationSuffix: "with-role", - } - - return buildPathIssue(b, pattern, displayAttrs) + return buildPathIssue(b, pattern) } -func buildPathIssue(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathIssue(b *backend, pattern string) *framework.Path { ret := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.metricsWrap("issue", roleRequired, b.pathIssue), - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "issuing_ca": { - Type: framework.TypeString, - Description: `Issuing Certificate Authority`, - Required: true, - }, - "ca_chain": { - Type: framework.TypeCommaStringSlice, - Description: `Certificate Chain`, - Required: false, - }, - "serial_number": { - Type: framework.TypeString, - Description: `Serial Number`, - Required: false, - }, - "expiration": { - Type: framework.TypeString, - Description: `Time of expiration`, - Required: false, - }, - "private_key": { - Type: framework.TypeString, - Description: `Private key`, - Required: false, - }, - "private_key_type": { - Type: framework.TypeString, - Description: `Private key type`, - Required: false, - }, - }, - }}, - }, }, }, @@ -108,78 +47,21 @@ func buildPathIssue(b *backend, pattern string, displayAttrs *framework.DisplayA func pathSign(b *backend) *framework.Path { pattern := "sign/" + framework.GenericNameRegex("role") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "sign", - OperationSuffix: "with-role", - } - - return buildPathSign(b, pattern, displayAttrs) + return buildPathSign(b, pattern) } func pathIssuerSign(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/sign/" + framework.GenericNameRegex("role") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "sign", - OperationSuffix: "with-role", - } - - return buildPathSign(b, pattern, displayAttrs) + return buildPathSign(b, pattern) } -func buildPathSign(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathSign(b *backend, pattern string) *framework.Path { ret := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: 
b.metricsWrap("sign", roleRequired, b.pathSign), - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "issuing_ca": { - Type: framework.TypeString, - Description: `Issuing Certificate Authority`, - Required: true, - }, - "ca_chain": { - Type: framework.TypeCommaStringSlice, - Description: `Certificate Chain`, - Required: false, - }, - "serial_number": { - Type: framework.TypeString, - Description: `Serial Number`, - Required: true, - }, - "expiration": { - Type: framework.TypeString, - Description: `Time of expiration`, - Required: true, - }, - "private_key": { - Type: framework.TypeString, - Description: `Private key`, - Required: false, - }, - "private_key_type": { - Type: framework.TypeString, - Description: `Private key type`, - Required: false, - }, - }, - }}, - }, }, }, @@ -200,79 +82,22 @@ func buildPathSign(b *backend, pattern string, displayAttrs *framework.DisplayAt func pathIssuerSignVerbatim(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/sign-verbatim" + framework.OptionalParamRegex("role") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "sign", - OperationSuffix: "verbatim|verbatim-with-role", - } - - return buildPathIssuerSignVerbatim(b, pattern, displayAttrs) + return buildPathIssuerSignVerbatim(b, pattern) } func pathSignVerbatim(b *backend) *framework.Path { pattern := "sign-verbatim" + framework.OptionalParamRegex("role") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "sign", - OperationSuffix: "verbatim|verbatim-with-role", - } - - return buildPathIssuerSignVerbatim(b, pattern, displayAttrs) + return buildPathIssuerSignVerbatim(b, pattern) } -func buildPathIssuerSignVerbatim(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathIssuerSignVerbatim(b *backend, pattern string) *framework.Path { ret := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, - Fields: getCsrSignVerbatimSchemaFields(), + Pattern: pattern, + Fields: map[string]*framework.FieldSchema{}, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.metricsWrap("sign-verbatim", roleOptional, b.pathSignVerbatim), - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "issuing_ca": { - Type: framework.TypeString, - Description: `Issuing Certificate Authority`, - Required: true, - }, - "ca_chain": { - Type: framework.TypeCommaStringSlice, - Description: `Certificate Chain`, - Required: false, - }, - "serial_number": { - Type: framework.TypeString, - Description: `Serial Number`, - Required: false, - }, - "expiration": { - Type: framework.TypeString, - Description: `Time of expiration`, - Required: false, - }, - "private_key": { - Type: framework.TypeString, - Description: `Private key`, - Required: false, - }, - "private_key_type": { - Type: framework.TypeString, - Description: `Private key type`, - Required: false, - }, - }, - }}, - }, }, }, @@ -280,6 +105,61 @@ func buildPathIssuerSignVerbatim(b *backend, pattern string, displayAttrs *frame 
HelpDescription: pathIssuerSignVerbatimHelpDesc, } + ret.Fields = addNonCACommonFields(ret.Fields) + + ret.Fields["csr"] = &framework.FieldSchema{ + Type: framework.TypeString, + Default: "", + Description: `PEM-format CSR to be signed. Values will be +taken verbatim from the CSR, except for +basic constraints.`, + } + + ret.Fields["key_usage"] = &framework.FieldSchema{ + Type: framework.TypeCommaStringSlice, + Default: []string{"DigitalSignature", "KeyAgreement", "KeyEncipherment"}, + Description: `A comma-separated string or list of key usages (not extended +key usages). Valid values can be found at +https://golang.org/pkg/crypto/x509/#KeyUsage +-- simply drop the "KeyUsage" part of the name. +To remove all key usages from being set, set +this value to an empty list.`, + } + + ret.Fields["ext_key_usage"] = &framework.FieldSchema{ + Type: framework.TypeCommaStringSlice, + Default: []string{}, + Description: `A comma-separated string or list of extended key usages. Valid values can be found at +https://golang.org/pkg/crypto/x509/#ExtKeyUsage +-- simply drop the "ExtKeyUsage" part of the name. +To remove all key usages from being set, set +this value to an empty list.`, + } + + ret.Fields["ext_key_usage_oids"] = &framework.FieldSchema{ + Type: framework.TypeCommaStringSlice, + Description: `A comma-separated string or list of extended key usage oids.`, + } + + ret.Fields["signature_bits"] = &framework.FieldSchema{ + Type: framework.TypeInt, + Default: 0, + Description: `The number of bits to use in the signature +algorithm; accepts 256 for SHA-2-256, 384 for SHA-2-384, and 512 for +SHA-2-512. Defaults to 0 to automatically detect based on key length +(SHA-2-256 for RSA keys, and matching the curve size for NIST P-Curves).`, + DisplayAttrs: &framework.DisplayAttributes{ + Value: 0, + }, + } + + ret.Fields["use_pss"] = &framework.FieldSchema{ + Type: framework.TypeBool, + Default: false, + Description: `Whether or not to use PSS signatures when using a +RSA key-type issuer. 
Defaults to false.`, + } + return ret } @@ -322,7 +202,51 @@ func (b *backend) pathSign(ctx context.Context, req *logical.Request, data *fram // pathSignVerbatim issues a certificate from a submitted CSR, *not* subject to // role restrictions func (b *backend) pathSignVerbatim(ctx context.Context, req *logical.Request, data *framework.FieldData, role *roleEntry) (*logical.Response, error) { - entry := buildSignVerbatimRole(data, role) + entry := &roleEntry{ + AllowLocalhost: true, + AllowAnyName: true, + AllowIPSANs: true, + AllowWildcardCertificates: new(bool), + EnforceHostnames: false, + KeyType: "any", + UseCSRCommonName: true, + UseCSRSANs: true, + AllowedOtherSANs: []string{"*"}, + AllowedSerialNumbers: []string{"*"}, + AllowedURISANs: []string{"*"}, + AllowedUserIDs: []string{"*"}, + CNValidations: []string{"disabled"}, + GenerateLease: new(bool), + KeyUsage: data.Get("key_usage").([]string), + ExtKeyUsage: data.Get("ext_key_usage").([]string), + ExtKeyUsageOIDs: data.Get("ext_key_usage_oids").([]string), + SignatureBits: data.Get("signature_bits").(int), + UsePSS: data.Get("use_pss").(bool), + } + *entry.AllowWildcardCertificates = true + + *entry.GenerateLease = false + + if role != nil { + if role.TTL > 0 { + entry.TTL = role.TTL + } + if role.MaxTTL > 0 { + entry.MaxTTL = role.MaxTTL + } + if role.GenerateLease != nil { + *entry.GenerateLease = *role.GenerateLease + } + if role.NotBeforeDuration > 0 { + entry.NotBeforeDuration = role.NotBeforeDuration + } + entry.NoStore = role.NoStore + entry.Issuer = role.Issuer + } + + if len(entry.Issuer) == 0 { + entry.Issuer = defaultRef + } return b.pathIssueSignCert(ctx, req, data, entry, true, true) } @@ -495,7 +419,7 @@ func (b *backend) pathIssueSignCert(ctx context.Context, req *logical.Request, d if err != nil { return nil, fmt.Errorf("unable to store certificate locally: %w", err) } - b.ifCountEnabledIncrementTotalCertificatesCount(certsCounted, key) + b.incrementTotalCertificatesCount(certsCounted, key) } if useCSR { diff --git a/builtin/logical/pki/path_manage_issuers.go b/builtin/logical/pki/path_manage_issuers.go index 0a4a4bc8cb314c..689b3a7166197a 100644 --- a/builtin/logical/pki/path_manage_issuers.go +++ b/builtin/logical/pki/path_manage_issuers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -9,7 +6,6 @@ import ( "crypto/x509" "encoding/pem" "fmt" - "net/http" "strings" "time" @@ -19,89 +15,20 @@ import ( ) func pathIssuerGenerateRoot(b *backend) *framework.Path { - pattern := "issuers/generate/root/" + framework.GenericNameRegex("exported") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuers, - OperationVerb: "generate", - OperationSuffix: "root", - } - - return buildPathGenerateRoot(b, pattern, displayAttrs) + return buildPathGenerateRoot(b, "issuers/generate/root/"+framework.GenericNameRegex("exported")) } func pathRotateRoot(b *backend) *framework.Path { - pattern := "root/rotate/" + framework.GenericNameRegex("exported") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuers, - OperationVerb: "rotate", - OperationSuffix: "root", - } - - return buildPathGenerateRoot(b, pattern, displayAttrs) + return buildPathGenerateRoot(b, "root/rotate/"+framework.GenericNameRegex("exported")) } -func buildPathGenerateRoot(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathGenerateRoot(b *backend, pattern string) *framework.Path { ret := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathCAGenerateRoot, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "expiration": { - Type: framework.TypeString, - Description: `The expiration of the given.`, - Required: true, - }, - "serial_number": { - Type: framework.TypeString, - Description: `The requested Subject's named serial number.`, - Required: true, - }, - "certificate": { - Type: framework.TypeString, - Description: `The generated self-signed CA certificate.`, - Required: true, - }, - "issuing_ca": { - Type: framework.TypeString, - Description: `The issuing certificate authority.`, - Required: true, - }, - "issuer_id": { - Type: framework.TypeString, - Description: `The ID of the issuer`, - Required: true, - }, - "issuer_name": { - Type: framework.TypeString, - Description: `The name of the issuer.`, - Required: true, - }, - "key_id": { - Type: framework.TypeString, - Description: `The ID of the key.`, - Required: true, - }, - "key_name": { - Type: framework.TypeString, - Description: `The key name if given.`, - Required: true, - }, - "private_key": { - Type: framework.TypeString, - Description: `The private key if exported was specified.`, - Required: false, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -119,63 +46,20 @@ func buildPathGenerateRoot(b *backend, pattern string, displayAttrs *framework.D } func pathIssuerGenerateIntermediate(b *backend) *framework.Path { - pattern := "issuers/generate/intermediate/" + framework.GenericNameRegex("exported") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuers, - OperationVerb: "generate", - OperationSuffix: "intermediate", - } - - return buildPathGenerateIntermediate(b, pattern, displayAttrs) + return buildPathGenerateIntermediate(b, + "issuers/generate/intermediate/"+framework.GenericNameRegex("exported")) } func pathCrossSignIntermediate(b *backend) *framework.Path { - pattern := "intermediate/cross-sign" - - 
displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "cross-sign", - OperationSuffix: "intermediate", - } - - return buildPathGenerateIntermediate(b, pattern, displayAttrs) + return buildPathGenerateIntermediate(b, "intermediate/cross-sign") } -func buildPathGenerateIntermediate(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathGenerateIntermediate(b *backend, pattern string) *framework.Path { ret := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathGenerateIntermediate, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "csr": { - Type: framework.TypeString, - Description: `Certificate signing request.`, - Required: true, - }, - "key_id": { - Type: framework.TypeString, - Description: `Id of the key.`, - Required: true, - }, - "private_key": { - Type: framework.TypeString, - Description: `Generated private key.`, - Required: false, - }, - "private_key_type": { - Type: framework.TypeString, - Description: `Specifies the format used for marshaling the private key.`, - Required: false, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -206,13 +90,6 @@ with Active Directory Certificate Services.`, func pathImportIssuer(b *backend) *framework.Path { return &framework.Path{ Pattern: "issuers/import/(cert|bundle)", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuers, - OperationVerb: "import", - OperationSuffix: "cert|bundle", - }, - Fields: map[string]*framework.FieldSchema{ "pem_bundle": { Type: framework.TypeString, @@ -224,38 +101,6 @@ secret-key (optional) and certificates.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathImportIssuers, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "mapping": { - Type: framework.TypeMap, - Description: "A mapping of issuer_id to key_id for all issuers included in this request", - Required: true, - }, - "imported_keys": { - Type: framework.TypeCommaStringSlice, - Description: "Net-new keys imported as a part of this request", - Required: true, - }, - "imported_issuers": { - Type: framework.TypeCommaStringSlice, - Description: "Net-new issuers imported as a part of this request", - Required: true, - }, - "existing_keys": { - Type: framework.TypeCommaStringSlice, - Description: "Existing keys specified as part of the import bundle of this request", - Required: true, - }, - "existing_issuers": { - Type: framework.TypeCommaStringSlice, - Description: "Existing issuers specified as part of the import bundle of this request", - Required: true, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -318,8 +163,6 @@ func (b *backend) pathImportIssuers(ctx context.Context, req *logical.Request, d var createdKeys []string var createdIssuers []string - var existingKeys []string - var existingIssuers []string issuerKeyMap := make(map[string]string) // Rather than using certutil.ParsePEMBundle (which restricts the @@ -376,8 +219,6 @@ 
func (b *backend) pathImportIssuers(ctx context.Context, req *logical.Request, d if !existing { createdKeys = append(createdKeys, key.ID.String()) - } else { - existingKeys = append(existingKeys, key.ID.String()) } } @@ -390,8 +231,6 @@ func (b *backend) pathImportIssuers(ctx context.Context, req *logical.Request, d issuerKeyMap[cert.ID.String()] = cert.KeyID.String() if !existing { createdIssuers = append(createdIssuers, cert.ID.String()) - } else { - existingIssuers = append(existingIssuers, cert.ID.String()) } } @@ -400,13 +239,11 @@ func (b *backend) pathImportIssuers(ctx context.Context, req *logical.Request, d "mapping": issuerKeyMap, "imported_keys": createdKeys, "imported_issuers": createdIssuers, - "existing_keys": existingKeys, - "existing_issuers": existingIssuers, }, } if len(createdIssuers) > 0 { - warnings, err := b.crlBuilder.rebuild(sc, true) + err := b.crlBuilder.rebuild(sc, true) if err != nil { // Before returning, check if the error message includes the // string "PSS". If so, it indicates we might've wanted to modify @@ -421,9 +258,6 @@ func (b *backend) pathImportIssuers(ctx context.Context, req *logical.Request, d return nil, err } - for index, warning := range warnings { - response.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } var issuersWithKeys []string for _, issuer := range createdIssuers { @@ -515,100 +349,11 @@ func pathRevokeIssuer(b *backend) *framework.Path { return &framework.Path{ Pattern: "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/revoke", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "revoke", - OperationSuffix: "issuer", - }, - - Fields: fields, + Fields: fields, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathRevokeIssuer, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "issuer_id": { - Type: framework.TypeString, - Description: `ID of the issuer`, - Required: true, - }, - "issuer_name": { - Type: framework.TypeString, - Description: `Name of the issuer`, - Required: true, - }, - "key_id": { - Type: framework.TypeString, - Description: `ID of the Key`, - Required: true, - }, - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "manual_chain": { - Type: framework.TypeCommaStringSlice, - Description: `Manual Chain`, - Required: true, - }, - "ca_chain": { - Type: framework.TypeCommaStringSlice, - Description: `Certificate Authority Chain`, - Required: true, - }, - "leaf_not_after_behavior": { - Type: framework.TypeString, - Description: ``, - Required: true, - }, - "usage": { - Type: framework.TypeString, - Description: `Allowed usage`, - Required: true, - }, - "revocation_signature_algorithm": { - Type: framework.TypeString, - Description: `Which signature algorithm to use when building CRLs`, - Required: true, - }, - "revoked": { - Type: framework.TypeBool, - Description: `Whether the issuer was revoked`, - Required: true, - }, - "issuing_certificates": { - Type: framework.TypeCommaStringSlice, - Description: `Specifies the URL values for the Issuing Certificate field`, - Required: true, - }, - "crl_distribution_points": { - Type: framework.TypeStringSlice, - Description: `Specifies the URL values for the CRL Distribution Points field`, - Required: true, - }, - "ocsp_servers": { - Type: framework.TypeStringSlice, - Description: 
`Specifies the URL values for the OCSP Servers field`, - Required: true, - }, - "revocation_time": { - Type: framework.TypeInt64, - Description: `Time of revocation`, - Required: false, - }, - "revocation_time_rfc3339": { - Type: framework.TypeTime, - Description: `RFC formatted time of revocation`, - Required: false, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -730,7 +475,7 @@ func (b *backend) pathRevokeIssuer(ctx context.Context, req *logical.Request, da } // Rebuild the CRL to include the newly revoked issuer. - warnings, crlErr := b.crlBuilder.rebuild(sc, false) + crlErr := b.crlBuilder.rebuild(sc, false) if crlErr != nil { switch crlErr.(type) { case errutil.UserError: @@ -746,9 +491,6 @@ func (b *backend) pathRevokeIssuer(ctx context.Context, req *logical.Request, da // Impossible. return nil, err } - for index, warning := range warnings { - response.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } // For sanity, we'll add a warning message here if there's no other // issuer which verifies this issuer. diff --git a/builtin/logical/pki/path_manage_keys.go b/builtin/logical/pki/path_manage_keys.go index 3c10c32903d346..90119ce4e8a1e2 100644 --- a/builtin/logical/pki/path_manage_keys.go +++ b/builtin/logical/pki/path_manage_keys.go @@ -1,13 +1,9 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "bytes" "context" "encoding/pem" - "net/http" "strings" "github.com/hashicorp/vault/sdk/framework" @@ -19,12 +15,6 @@ func pathGenerateKey(b *backend) *framework.Path { return &framework.Path{ Pattern: "keys/generate/(internal|exported|kms)", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "generate", - OperationSuffix: "internal-key|exported-key|kms-key", - }, - Fields: map[string]*framework.FieldSchema{ keyNameParam: { Type: framework.TypeString, @@ -64,36 +54,7 @@ is required. Ignored for other types.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathGenerateKeyHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "key_id": { - Type: framework.TypeString, - Description: `ID assigned to this key.`, - Required: true, - }, - "key_name": { - Type: framework.TypeString, - Description: `Name assigned to this key.`, - Required: true, - }, - "key_type": { - Type: framework.TypeString, - Description: `The type of key to use; defaults to RSA. 
"rsa" - "ec" and "ed25519" are the only valid values.`, - Required: true, - }, - "private_key": { - Type: framework.TypeString, - Description: `The private key string`, - Required: false, - }, - }, - }}, - }, - + Callback: b.pathGenerateKeyHandler, ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, @@ -188,12 +149,6 @@ func pathImportKey(b *backend) *framework.Path { return &framework.Path{ Pattern: "keys/import", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "import", - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ keyNameParam: { Type: framework.TypeString, @@ -207,30 +162,7 @@ func pathImportKey(b *backend) *framework.Path { Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathImportKeyHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "key_id": { - Type: framework.TypeString, - Description: `ID assigned to this key.`, - Required: true, - }, - "key_name": { - Type: framework.TypeString, - Description: `Name assigned to this key.`, - Required: true, - }, - "key_type": { - Type: framework.TypeString, - Description: `The type of key to use; defaults to RSA. "rsa" - "ec" and "ed25519" are the only valid values.`, - Required: true, - }, - }, - }}, - }, + Callback: b.pathImportKeyHandler, ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, diff --git a/builtin/logical/pki/path_manage_keys_test.go b/builtin/logical/pki/path_manage_keys_test.go index 3c5708a8bb04d7..7b53ae836ee61b 100644 --- a/builtin/logical/pki/path_manage_keys_test.go +++ b/builtin/logical/pki/path_manage_keys_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -12,8 +9,6 @@ import ( "fmt" "testing" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" - "github.com/hashicorp/vault/sdk/helper/certutil" "github.com/hashicorp/vault/sdk/logical" @@ -100,8 +95,6 @@ func TestPKI_PathManageKeys_GenerateExportedKeys(t *testing.T) { }, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("keys/generate/exported"), logical.UpdateOperation), resp, true) - require.NoError(t, err, "Failed generating exported key") require.NotNil(t, resp, "Got nil response generating exported key") require.Equal(t, "ec", resp.Data["key_type"], "key_type field contained an invalid type") @@ -143,9 +136,6 @@ func TestPKI_PathManageKeys_ImportKeyBundle(t *testing.T) { }, MountPoint: "pki/", }) - - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("keys/import"), logical.UpdateOperation), resp, true) - require.NoError(t, err, "Failed importing ec key") require.NotNil(t, resp, "Got nil response importing ec key") require.False(t, resp.IsError(), "received an error response: %v", resp.Error()) @@ -334,8 +324,6 @@ func TestPKI_PathManageKeys_UpdateKeyDetails(t *testing.T) { Data: map[string]interface{}{"key_name": "new-name"}, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("key/"+keyId.String()), logical.UpdateOperation), resp, true) - require.NoError(t, err, "failed updating key with new name") require.NotNil(t, resp, "Got nil response updating key with new name") require.False(t, resp.IsError(), "unexpected error updating key with new name: %#v", resp.Error()) @@ -346,8 +334,6 @@ func TestPKI_PathManageKeys_UpdateKeyDetails(t *testing.T) { Storage: s, MountPoint: "pki/", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("key/"+keyId.String()), logical.ReadOperation), resp, true) - require.NoError(t, err, "failed reading key after name update") require.NotNil(t, resp, "Got nil response reading key after name update") require.False(t, resp.IsError(), "unexpected error reading key: %#v", resp.Error()) diff --git a/builtin/logical/pki/path_ocsp.go b/builtin/logical/pki/path_ocsp.go index b9f5cd1f9fd98b..f62f1d808a0f87 100644 --- a/builtin/logical/pki/path_ocsp.go +++ b/builtin/logical/pki/path_ocsp.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -71,33 +68,16 @@ var ( ) func buildPathOcspGet(b *backend) *framework.Path { - pattern := "ocsp/" + framework.MatchAllRegex(ocspReqParam) - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "query", - OperationSuffix: "ocsp-with-get-req", - } - - return buildOcspGetWithPath(b, pattern, displayAttrs) + return buildOcspGetWithPath(b, "ocsp/"+framework.MatchAllRegex(ocspReqParam)) } func buildPathUnifiedOcspGet(b *backend) *framework.Path { - pattern := "unified-ocsp/" + framework.MatchAllRegex(ocspReqParam) - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "query", - OperationSuffix: "unified-ocsp-with-get-req", - } - - return buildOcspGetWithPath(b, pattern, displayAttrs) + return buildOcspGetWithPath(b, "unified-ocsp/"+framework.MatchAllRegex(ocspReqParam)) } -func buildOcspGetWithPath(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildOcspGetWithPath(b *backend, pattern string) *framework.Path { return &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Fields: map[string]*framework.FieldSchema{ ocspReqParam: { Type: framework.TypeString, @@ -116,33 +96,16 @@ func buildOcspGetWithPath(b *backend, pattern string, displayAttrs *framework.Di } func buildPathOcspPost(b *backend) *framework.Path { - pattern := "ocsp" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "query", - OperationSuffix: "ocsp", - } - - return buildOcspPostWithPath(b, pattern, displayAttrs) + return buildOcspPostWithPath(b, "ocsp") } func buildPathUnifiedOcspPost(b *backend) *framework.Path { - pattern := "unified-ocsp" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "query", - OperationSuffix: "unified-ocsp", - } - - return buildOcspPostWithPath(b, pattern, displayAttrs) + return buildOcspPostWithPath(b, "unified-ocsp") } -func buildOcspPostWithPath(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildOcspPostWithPath(b *backend, pattern string) *framework.Path { return &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, + Pattern: pattern, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.ocspHandler, diff --git a/builtin/logical/pki/path_ocsp_test.go b/builtin/logical/pki/path_ocsp_test.go index ab1173fa342933..84f7a10ed8eed3 100644 --- a/builtin/logical/pki/path_ocsp_test.go +++ b/builtin/logical/pki/path_ocsp_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -17,8 +14,6 @@ import ( "testing" "time" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" - vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/vault" @@ -202,7 +197,6 @@ func TestOcsp_UnknownIssuerIdWithDefaultHavingOcspUsageRemoved(t *testing.T) { resp, err := CBWrite(b, s, "revoke", map[string]interface{}{ "serial_number": serial, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("revoke"), logical.UpdateOperation), resp, true) requireSuccessNonNilResponse(t, resp, err, "revoke") // Twiddle the entry so that the issuer id is no longer valid. 
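The hunks in this part of the backport all make the same structural change: each framework.Path drops its DisplayAttrs and per-status Responses blocks, and the helper builders lose their displayAttrs parameter. For reference, a minimal sketch (not part of this patch) of the simplified path shape these hunks converge on, assuming the sdk/framework and sdk/logical packages already imported throughout the diff; the pathExample function, its pattern, and its inline handler are hypothetical names used only for illustration.

// Illustrative sketch only -- not applied by this patch.
package pki

import (
	"context"

	"github.com/hashicorp/vault/sdk/framework"
	"github.com/hashicorp/vault/sdk/logical"
)

func pathExample() *framework.Path {
	return &framework.Path{
		// Pattern, Fields, and Operations only; no DisplayAttrs and no
		// per-status Responses map, matching the simplified form above.
		Pattern: "example/" + framework.GenericNameRegex("name"),

		Fields: map[string]*framework.FieldSchema{
			"name": {
				Type:        framework.TypeString,
				Description: "Name of the example entry.",
			},
		},

		Operations: map[logical.Operation]framework.OperationHandler{
			logical.ReadOperation: &framework.PathOperation{
				Callback: func(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
					// The returned logical.Response is the only response
					// contract once the Responses schema is removed.
					return &logical.Response{
						Data: map[string]interface{}{
							"name": data.Get("name"),
						},
					}, nil
				},
				// Reads are served locally; write paths in the real backend
				// set these forwarding flags to true instead.
				ForwardPerformanceStandby:   false,
				ForwardPerformanceSecondary: false,
			},
		},

		HelpSynopsis:    "Example of the simplified path layout.",
		HelpDescription: "Example of the simplified path layout.",
	}
}

Without the Responses map, the framework has no per-status response schemas to surface for these endpoints (for example in generated OpenAPI output); whatever the callback returns in its logical.Response is what callers see.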
diff --git a/builtin/logical/pki/path_resign_crls.go b/builtin/logical/pki/path_resign_crls.go index a82f94f32fbcdc..7f8746aa9452b9 100644 --- a/builtin/logical/pki/path_resign_crls.go +++ b/builtin/logical/pki/path_resign_crls.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -15,7 +12,6 @@ import ( "errors" "fmt" "math/big" - "net/http" "strconv" "strings" "time" @@ -43,13 +39,6 @@ var ( func pathResignCrls(b *backend) *framework.Path { return &framework.Path{ Pattern: "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/resign-crls", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "resign", - OperationSuffix: "crls", - }, - Fields: map[string]*framework.FieldSchema{ issuerRefParam: { Type: framework.TypeString, @@ -88,18 +77,6 @@ base64 encoded. Defaults to "pem".`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathUpdateResignCrlsHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "crl": { - Type: framework.TypeString, - Description: `CRL`, - Required: true, - }, - }, - }}, - }, }, }, @@ -112,13 +89,6 @@ base64 encoded. Defaults to "pem".`, func pathSignRevocationList(b *backend) *framework.Path { return &framework.Path{ Pattern: "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/sign-revocation-list", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "sign", - OperationSuffix: "revocation-list", - }, - Fields: map[string]*framework.FieldSchema{ issuerRefParam: { Type: framework.TypeString, @@ -163,18 +133,6 @@ value (string)`, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathUpdateSignRevocationListHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "crl": { - Type: framework.TypeString, - Description: `CRL`, - Required: true, - }, - }, - }}, - }, }, }, diff --git a/builtin/logical/pki/path_resign_crls_test.go b/builtin/logical/pki/path_resign_crls_test.go index f1ee1152c041a5..b6870d34e7a733 100644 --- a/builtin/logical/pki/path_resign_crls_test.go +++ b/builtin/logical/pki/path_resign_crls_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -13,8 +10,6 @@ import ( "testing" "time" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" - "github.com/hashicorp/vault/api" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/vault" @@ -60,7 +55,6 @@ func TestResignCrls_NormalCrl(t *testing.T) { "format": "pem", "crls": []string{crl1, crl2}, }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b1.Route("issuer/default/resign-crls"), logical.UpdateOperation), resp, true) requireSuccessNonNilResponse(t, resp, err) requireFieldsSetInResp(t, resp, "crl") pemCrl := resp.Data["crl"].(string) @@ -357,7 +351,6 @@ func TestSignRevocationList_NoRevokedCerts(t *testing.T) { "next_update": "12h", "format": "pem", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("issuer/default/sign-revocation-list"), logical.UpdateOperation), resp, true) requireSuccessNonNilResponse(t, resp, err) requireFieldsSetInResp(t, resp, "crl") pemCrl := resp.Data["crl"].(string) diff --git a/builtin/logical/pki/path_revoke.go b/builtin/logical/pki/path_revoke.go index 7aa23bc7230521..ff8393e036199d 100644 --- a/builtin/logical/pki/path_revoke.go +++ b/builtin/logical/pki/path_revoke.go @@ -1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( "context" - "crypto" "crypto/ecdsa" "crypto/ed25519" "crypto/rsa" @@ -13,7 +9,6 @@ import ( "crypto/x509" "encoding/pem" "fmt" - "net/http" "strings" "time" @@ -29,26 +24,9 @@ func pathListCertsRevoked(b *backend) *framework.Path { return &framework.Path{ Pattern: "certs/revoked/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "revoked-certs", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathListRevokedCertsHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "keys": { - Type: framework.TypeStringSlice, - Description: `List of Keys`, - Required: false, - }, - }, - }}, - }, }, }, @@ -61,11 +39,6 @@ func pathListCertsRevocationQueue(b *backend) *framework.Path { return &framework.Path{ Pattern: "certs/revocation-queue/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "certs-revocation-queue", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathListRevocationQueueHandler, @@ -80,12 +53,6 @@ func pathListCertsRevocationQueue(b *backend) *framework.Path { func pathRevoke(b *backend) *framework.Path { return &framework.Path{ Pattern: `revoke`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "revoke", - }, - Fields: map[string]*framework.FieldSchema{ "serial_number": { Type: framework.TypeString, @@ -106,28 +73,6 @@ signed by an issuer in this mount.`, // If this needs to write, the entire request will be forwarded to the // active node of the current performance cluster, but we don't want to // forward invalid revoke requests there. 
- Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "revocation_time": { - Type: framework.TypeDurationSecond, - Description: `Revocation Time`, - Required: false, - }, - "revocation_time_rfc3339": { - Type: framework.TypeTime, - Description: `Revocation Time`, - Required: false, - }, - "state": { - Type: framework.TypeString, - Description: `Revocation State`, - Required: false, - }, - }, - }}, - }, }, }, @@ -139,13 +84,6 @@ signed by an issuer in this mount.`, func pathRevokeWithKey(b *backend) *framework.Path { return &framework.Path{ Pattern: `revoke-with-key`, - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "revoke", - OperationSuffix: "with-key", - }, - Fields: map[string]*framework.FieldSchema{ "serial_number": { Type: framework.TypeString, @@ -171,28 +109,6 @@ be in PEM format.`, // If this needs to write, the entire request will be forwarded to the // active node of the current performance cluster, but we don't want to // forward invalid revoke requests there. - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "revocation_time": { - Type: framework.TypeDurationSecond, - Description: `Revocation Time`, - Required: false, - }, - "revocation_time_rfc3339": { - Type: framework.TypeTime, - Description: `Revocation Time`, - Required: false, - }, - "state": { - Type: framework.TypeString, - Description: `Revocation State`, - Required: false, - }, - }, - }}, - }, }, }, @@ -205,12 +121,6 @@ func pathRotateCRL(b *backend) *framework.Path { return &framework.Path{ Pattern: `crl/rotate`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "rotate", - OperationSuffix: "crl", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathRotateCRLRead, @@ -218,18 +128,6 @@ func pathRotateCRL(b *backend) *framework.Path { // so this request should be forwarded when it is first seen, not // when it is ready to write. ForwardPerformanceStandby: true, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "success": { - Type: framework.TypeBool, - Description: `Whether rotation was successful`, - Required: true, - }, - }, - }}, - }, }, }, @@ -242,12 +140,6 @@ func pathRotateDeltaCRL(b *backend) *framework.Path { return &framework.Path{ Pattern: `crl/rotate-delta`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "rotate", - OperationSuffix: "delta-crl", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathRotateDeltaCRLRead, @@ -255,18 +147,6 @@ func pathRotateDeltaCRL(b *backend) *framework.Path { // so this request should be forwarded when it is first seen, not // when it is ready to write. 
ForwardPerformanceStandby: true, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "success": { - Type: framework.TypeBool, - Description: `Whether rotation was successful`, - Required: true, - }, - }, - }}, - }, }, }, @@ -279,31 +159,9 @@ func pathListUnifiedRevoked(b *backend) *framework.Path { return &framework.Path{ Pattern: "certs/unified-revoked/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "unified-revoked-certs", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathListUnifiedRevokedCertsHandler, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "keys": { - Type: framework.TypeStringSlice, - Description: `List of Keys`, - Required: false, - }, - "key_info": { - Type: framework.TypeString, - Description: `Key information`, - Required: false, - }, - }, - }}, - }, }, }, @@ -447,10 +305,6 @@ func (b *backend) pathRevokeWriteHandleKey(req *logical.Request, certReference * return fmt.Errorf("failed to parse provided private key: %w", err) } - return validatePrivateKeyMatchesCert(signer, certReference) -} - -func validatePrivateKeyMatchesCert(signer crypto.Signer, certReference *x509.Certificate) error { // Finally, verify if the cert and key match. This code has been // cribbed from the Go TLS config code, with minor modifications. // @@ -458,6 +312,7 @@ func validatePrivateKeyMatchesCert(signer crypto.Signer, certReference *x509.Cer // components and ensure we validate exponent and curve information // as well. // + // // See: https://github.com/golang/go/blob/c6a2dada0df8c2d75cf3ae599d7caed77d416fa2/src/crypto/tls/tls.go#L304-L331 switch certPub := certReference.PublicKey.(type) { case *rsa.PublicKey: @@ -655,7 +510,7 @@ func (b *backend) pathRotateCRLRead(ctx context.Context, req *logical.Request, _ defer b.revokeStorageLock.RUnlock() sc := b.makeStorageContext(ctx, req.Storage) - warnings, crlErr := b.crlBuilder.rebuild(sc, false) + crlErr := b.crlBuilder.rebuild(sc, false) if crlErr != nil { switch crlErr.(type) { case errutil.UserError: @@ -665,17 +520,11 @@ func (b *backend) pathRotateCRLRead(ctx context.Context, req *logical.Request, _ } } - resp := &logical.Response{ + return &logical.Response{ Data: map[string]interface{}{ "success": true, }, - } - - for index, warning := range warnings { - resp.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } - - return resp, nil + }, nil } func (b *backend) pathRotateDeltaCRLRead(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) { @@ -688,7 +537,7 @@ func (b *backend) pathRotateDeltaCRLRead(ctx context.Context, req *logical.Reque isEnabled := cfg.EnableDelta - warnings, crlErr := b.crlBuilder.rebuildDeltaCRLsIfForced(sc, true) + crlErr := b.crlBuilder.rebuildDeltaCRLsIfForced(sc, true) if crlErr != nil { switch crlErr.(type) { case errutil.UserError: @@ -707,9 +556,6 @@ func (b *backend) pathRotateDeltaCRLRead(ctx context.Context, req *logical.Reque if !isEnabled { resp.AddWarning("requested rebuild of delta CRL when delta CRL is not enabled; this is a no-op") } - for index, warning := range warnings { - resp.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } return resp, nil } diff --git a/builtin/logical/pki/path_roles.go 
b/builtin/logical/pki/path_roles.go index 16564085e76f96..9137f2b9fe5267 100644 --- a/builtin/logical/pki/path_roles.go +++ b/builtin/logical/pki/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -8,7 +5,6 @@ import ( "crypto/x509" "encoding/json" "fmt" - "net/http" "strings" "time" @@ -24,26 +20,9 @@ func pathListRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "roles", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ListOperation: &framework.PathOperation{ Callback: b.pathRoleList, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "keys": { - Type: framework.TypeStringSlice, - Description: "List of roles", - Required: true, - }, - }, - }}, - }, }, }, @@ -53,360 +32,8 @@ func pathListRoles(b *backend) *framework.Path { } func pathRoles(b *backend) *framework.Path { - pathRolesResponseFields := map[string]*framework.FieldSchema{ - "ttl": { - Type: framework.TypeDurationSecond, - Required: true, - Description: `The lease duration (validity period of the -certificate) if no specific lease duration is requested. -The lease duration controls the expiration of certificates -issued by this backend. Defaults to the system default -value or the value of max_ttl, whichever is shorter.`, - }, - - "max_ttl": { - Type: framework.TypeDurationSecond, - Required: true, - Description: `The maximum allowed lease duration. If not -set, defaults to the system maximum lease TTL.`, - }, - "allow_token_displayname": { - Type: framework.TypeBool, - Required: true, - Description: `Whether to allow "localhost" and "localdomain" -as a valid common name in a request, independent of allowed_domains value.`, - }, - - "allow_localhost": { - Type: framework.TypeBool, - Required: true, - Description: `Whether to allow "localhost" and "localdomain" -as a valid common name in a request, independent of allowed_domains value.`, - }, - - "allowed_domains": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `Specifies the domains this role is allowed -to issue certificates for. This is used with the allow_bare_domains, -allow_subdomains, and allow_glob_domains to determine matches for the -common name, DNS-typed SAN entries, and Email-typed SAN entries of -certificates. See the documentation for more information. This parameter -accepts a comma-separated string or list of domains.`, - }, - "allowed_domains_template": { - Type: framework.TypeBool, - Required: true, - Description: `If set, Allowed domains can be specified using identity template policies. - Non-templated domains are also permitted.`, - }, - "allow_bare_domains": { - Type: framework.TypeBool, - Required: true, - Description: `If set, clients can request certificates -for the base domains themselves, e.g. "example.com" of domains listed -in allowed_domains. This is a separate option as in some cases this can -be considered a security threat. See the documentation for more -information.`, - }, - - "allow_subdomains": { - Type: framework.TypeBool, - Required: true, - Description: `If set, clients can request certificates for -subdomains of domains listed in allowed_domains, including wildcard -subdomains. 
See the documentation for more information.`, - }, - - "allow_glob_domains": { - Type: framework.TypeBool, - Required: true, - Description: `If set, domains specified in allowed_domains -can include shell-style glob patterns, e.g. "ftp*.example.com". -See the documentation for more information.`, - }, - - "allow_wildcard_certificates": { - Type: framework.TypeBool, - Required: true, - Description: `If set, allows certificates with wildcards in -the common name to be issued, conforming to RFC 6125's Section 6.4.3; e.g., -"*.example.net" or "b*z.example.net". See the documentation for more -information.`, - }, - - "allow_any_name": { - Type: framework.TypeBool, - Required: true, - Description: `If set, clients can request certificates for -any domain, regardless of allowed_domains restrictions. -See the documentation for more information.`, - }, - - "enforce_hostnames": { - Type: framework.TypeBool, - Required: true, - Description: `If set, only valid host names are allowed for -CN and DNS SANs, and the host part of email addresses. Defaults to true.`, - }, - - "allow_ip_sans": { - Type: framework.TypeBool, - Required: true, - Description: `If set, IP Subject Alternative Names are allowed. -Any valid IP is accepted and No authorization checking is performed.`, - }, - - "allowed_uri_sans": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `If set, an array of allowed URIs for URI Subject Alternative Names. -Any valid URI is accepted, these values support globbing.`, - }, - - "allowed_uri_sans_template": { - Type: framework.TypeBool, - Required: true, - Description: `If set, Allowed URI SANs can be specified using identity template policies. - Non-templated URI SANs are also permitted.`, - }, - - "allowed_other_sans": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `If set, an array of allowed other names to put in SANs. These values support globbing and must be in the format ;:. Currently only "utf8" is a valid type. All values, including globbing values, must use this syntax, with the exception being a single "*" which allows any OID and any value (but type must still be utf8).`, - }, - - "allowed_serial_numbers": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `If set, an array of allowed serial numbers to put in Subject. These values support globbing.`, - }, - "allowed_user_ids": { - Type: framework.TypeCommaStringSlice, - Description: `If set, an array of allowed user-ids to put in user system login name specified here: https://www.rfc-editor.org/rfc/rfc1274#section-9.3.1`, - }, - "server_flag": { - Type: framework.TypeBool, - Default: true, - Description: `If set, certificates are flagged for server auth use. -Defaults to true. See also RFC 5280 Section 4.2.1.12.`, - }, - - "client_flag": { - Type: framework.TypeBool, - Required: true, - Description: `If set, certificates are flagged for client auth use. -Defaults to true. See also RFC 5280 Section 4.2.1.12.`, - }, - - "code_signing_flag": { - Type: framework.TypeBool, - Required: true, - Description: `If set, certificates are flagged for code signing -use. Defaults to false. See also RFC 5280 Section 4.2.1.12.`, - }, - - "email_protection_flag": { - Type: framework.TypeBool, - Required: true, - Description: `If set, certificates are flagged for email -protection use. Defaults to false. See also RFC 5280 Section 4.2.1.12.`, - }, - - "key_type": { - Type: framework.TypeString, - Required: true, - Description: `The type of key to use; defaults to RSA. 
"rsa" -"ec", "ed25519" and "any" are the only valid values.`, - }, - - "key_bits": { - Type: framework.TypeInt, - Required: true, - Description: `The number of bits to use. Allowed values are -0 (universal default); with rsa key_type: 2048 (default), 3072, or -4096; with ec key_type: 224, 256 (default), 384, or 521; ignored with -ed25519.`, - }, - "signature_bits": { - Type: framework.TypeInt, - Required: true, - Description: `The number of bits to use in the signature -algorithm; accepts 256 for SHA-2-256, 384 for SHA-2-384, and 512 for -SHA-2-512. Defaults to 0 to automatically detect based on key length -(SHA-2-256 for RSA keys, and matching the curve size for NIST P-Curves).`, - }, - "use_pss": { - Type: framework.TypeBool, - Required: false, - Description: `Whether or not to use PSS signatures when using a -RSA key-type issuer. Defaults to false.`, - }, - "key_usage": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `A comma-separated string or list of key usages (not extended -key usages). Valid values can be found at -https://golang.org/pkg/crypto/x509/#KeyUsage --- simply drop the "KeyUsage" part of the name. -To remove all key usages from being set, set -this value to an empty list. See also RFC 5280 -Section 4.2.1.3.`, - }, - - "ext_key_usage": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `A comma-separated string or list of extended key usages. Valid values can be found at -https://golang.org/pkg/crypto/x509/#ExtKeyUsage --- simply drop the "ExtKeyUsage" part of the name. -To remove all key usages from being set, set -this value to an empty list. See also RFC 5280 -Section 4.2.1.12.`, - }, - - "ext_key_usage_oids": { - Type: framework.TypeCommaStringSlice, - Required: true, - Description: `A comma-separated string or list of extended key usage oids.`, - }, - - "use_csr_common_name": { - Type: framework.TypeBool, - Required: true, - Description: `If set, when used with a signing profile, -the common name in the CSR will be used. This -does *not* include any requested Subject Alternative -Names; use use_csr_sans for that. Defaults to true.`, - }, - - "use_csr_sans": { - Type: framework.TypeBool, - Required: true, - Description: `If set, when used with a signing profile, -the SANs in the CSR will be used. This does *not* -include the Common Name (cn); use use_csr_common_name -for that. 
Defaults to true.`, - }, - - "ou": { - Type: framework.TypeCommaStringSlice, - Description: `If set, OU (OrganizationalUnit) will be set to -this value in certificates issued by this role.`, - }, - - "organization": { - Type: framework.TypeCommaStringSlice, - Description: `If set, O (Organization) will be set to -this value in certificates issued by this role.`, - }, - - "country": { - Type: framework.TypeCommaStringSlice, - Description: `If set, Country will be set to -this value in certificates issued by this role.`, - }, - - "locality": { - Type: framework.TypeCommaStringSlice, - Description: `If set, Locality will be set to -this value in certificates issued by this role.`, - }, - - "province": { - Type: framework.TypeCommaStringSlice, - Description: `If set, Province will be set to -this value in certificates issued by this role.`, - }, - - "street_address": { - Type: framework.TypeCommaStringSlice, - Description: `If set, Street Address will be set to -this value in certificates issued by this role.`, - }, - - "postal_code": { - Type: framework.TypeCommaStringSlice, - Description: `If set, Postal Code will be set to -this value in certificates issued by this role.`, - }, - - "generate_lease": { - Type: framework.TypeBool, - Description: ` -If set, certificates issued/signed against this role will have Vault leases -attached to them. Defaults to "false". Certificates can be added to the CRL by -"vault revoke " when certificates are associated with leases. It can -also be done using the "pki/revoke" endpoint. However, when lease generation is -disabled, invoking "pki/revoke" would be the only way to add the certificates -to the CRL. When large number of certificates are generated with long -lifetimes, it is recommended that lease generation be disabled, as large amount of -leases adversely affect the startup time of Vault.`, - }, - - "no_store": { - Type: framework.TypeBool, - Description: ` -If set, certificates issued/signed against this role will not be stored in the -storage backend. This can improve performance when issuing large numbers of -certificates. However, certificates issued in this way cannot be enumerated -or revoked, so this option is recommended only for certificates that are -non-sensitive, or extremely short-lived. This option implies a value of "false" -for "generate_lease".`, - }, - - "require_cn": { - Type: framework.TypeBool, - Description: `If set to false, makes the 'common_name' field optional while generating a certificate.`, - }, - - "cn_validations": { - Type: framework.TypeCommaStringSlice, - Description: `List of allowed validations to run against the -Common Name field. Values can include 'email' to validate the CN is a email -address, 'hostname' to validate the CN is a valid hostname (potentially -including wildcards). When multiple validations are specified, these take -OR semantics (either email OR hostname are allowed). 
The special value -'disabled' allows disabling all CN name validations, allowing for arbitrary -non-Hostname, non-Email address CNs.`, - }, - - "policy_identifiers": { - Type: framework.TypeCommaStringSlice, - Description: `A comma-separated string or list of policy OIDs, or a JSON list of qualified policy -information, which must include an oid, and may include a notice and/or cps url, using the form -[{"oid"="1.3.6.1.4.1.7.8","notice"="I am a user Notice"}, {"oid"="1.3.6.1.4.1.44947.1.2.4 ","cps"="https://example.com"}].`, - }, - - "basic_constraints_valid_for_non_ca": { - Type: framework.TypeBool, - Description: `Mark Basic Constraints valid when issuing non-CA certificates.`, - }, - "not_before_duration": { - Type: framework.TypeDurationSecond, - Description: `The duration before now which the certificate needs to be backdated by.`, - }, - "not_after": { - Type: framework.TypeString, - Description: `Set the not after field of the certificate with specified date value. -The value format should be given in UTC format YYYY-MM-ddTHH:MM:SSZ.`, - }, - "issuer_ref": { - Type: framework.TypeString, - Description: `Reference to the issuer used to sign requests -serviced by this role.`, - }, - } - return &framework.Path{ Pattern: "roles/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "role", - }, - Fields: map[string]*framework.FieldSchema{ "backend": { Type: framework.TypeString, @@ -823,44 +450,21 @@ serviced by this role.`, Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathRoleRead, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: pathRolesResponseFields, - }}, - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathRoleCreate, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: pathRolesResponseFields, - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathRoleDelete, - Responses: map[int][]framework.Response{ - http.StatusNoContent: {{ - Description: "No Content", - }}, - }, // Read more about why these flags are set in backend.go. ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, }, logical.PatchOperation: &framework.PathOperation{ Callback: b.pathRolePatch, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: pathRolesResponseFields, - }}, - }, // Read more about why these flags are set in backend.go. 
ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -1014,8 +618,6 @@ func (b *backend) getRole(ctx context.Context, s logical.Storage, n string) (*ro } } - result.Name = n - return &result, nil } @@ -1107,7 +709,6 @@ func (b *backend) pathRoleCreate(ctx context.Context, req *logical.Request, data NotBeforeDuration: time.Duration(data.Get("not_before_duration").(int)) * time.Second, NotAfter: data.Get("not_after").(string), Issuer: data.Get("issuer_ref").(string), - Name: name, } allowedOtherSANs := data.Get("allowed_other_sans").([]string) @@ -1513,8 +1114,6 @@ type roleEntry struct { NotBeforeDuration time.Duration `json:"not_before_duration"` NotAfter string `json:"not_after"` Issuer string `json:"issuer"` - // Name is only set when the role has been stored, on the fly roles have a blank name - Name string `json:"-"` } func (r *roleEntry) ToResponseData() map[string]interface{} { diff --git a/builtin/logical/pki/path_roles_test.go b/builtin/logical/pki/path_roles_test.go index 98f5e277bbafaa..315c6d0bec7483 100644 --- a/builtin/logical/pki/path_roles_test.go +++ b/builtin/logical/pki/path_roles_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -14,7 +11,6 @@ import ( "github.com/go-errors/errors" "github.com/hashicorp/go-secure-stdlib/strutil" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" "github.com/hashicorp/vault/sdk/logical" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -145,14 +141,12 @@ func TestPki_RoleKeyUsage(t *testing.T) { } resp, err = b.HandleRequest(context.Background(), roleReq) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route(roleReq.Path), logical.UpdateOperation), resp, true) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: err: %v resp: %#v", err, resp) } roleReq.Operation = logical.ReadOperation resp, err = b.HandleRequest(context.Background(), roleReq) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route(roleReq.Path), logical.ReadOperation), resp, true) if err != nil || (resp != nil && resp.IsError()) { t.Fatalf("bad: err: %v resp: %#v", err, resp) } diff --git a/builtin/logical/pki/path_root.go b/builtin/logical/pki/path_root.go index fc5476bef05f08..03aa3a3e6fdabe 100644 --- a/builtin/logical/pki/path_root.go +++ b/builtin/logical/pki/path_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -15,7 +12,6 @@ import ( "encoding/pem" "errors" "fmt" - "net/http" "reflect" "strings" "time" @@ -30,34 +26,15 @@ import ( ) func pathGenerateRoot(b *backend) *framework.Path { - pattern := "root/generate/" + framework.GenericNameRegex("exported") - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "generate", - OperationSuffix: "root", - } - - return buildPathGenerateRoot(b, pattern, displayAttrs) + return buildPathGenerateRoot(b, "root/generate/"+framework.GenericNameRegex("exported")) } func pathDeleteRoot(b *backend) *framework.Path { ret := &framework.Path{ Pattern: "root", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationSuffix: "root", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.DeleteOperation: &framework.PathOperation{ Callback: b.pathCADeleteRoot, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - }}, - }, // Read more about why these flags are set in backend.go ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -294,16 +271,13 @@ func (b *backend) pathCAGenerateRoot(ctx context.Context, req *logical.Request, if err != nil { return nil, fmt.Errorf("unable to store certificate locally: %w", err) } - b.ifCountEnabledIncrementTotalCertificatesCount(certsCounted, key) + b.incrementTotalCertificatesCount(certsCounted, key) // Build a fresh CRL - warnings, err = b.crlBuilder.rebuild(sc, true) + err = b.crlBuilder.rebuild(sc, true) if err != nil { return nil, err } - for index, warning := range warnings { - resp.AddWarning(fmt.Sprintf("Warning %d during CRL rebuild: %v", index+1, warning)) - } if parsedBundle.Certificate.MaxPathLen == 0 { resp.AddWarning("Max path length of the generated certificate is zero. This certificate cannot be used to issue intermediate CA certificates.") @@ -494,7 +468,7 @@ func (b *backend) pathIssuerSignIntermediate(ctx context.Context, req *logical.R if err != nil { return nil, fmt.Errorf("unable to store certificate locally: %w", err) } - b.ifCountEnabledIncrementTotalCertificatesCount(certsCounted, key) + b.incrementTotalCertificatesCount(certsCounted, key) if parsedBundle.Certificate.MaxPathLen == 0 { resp.AddWarning("Max path length of the signed certificate is zero. This certificate cannot be used to issue intermediate CA certificates.") diff --git a/builtin/logical/pki/path_sign_issuers.go b/builtin/logical/pki/path_sign_issuers.go index 0b6b8334e418f5..cadbac5553f365 100644 --- a/builtin/logical/pki/path_sign_issuers.go +++ b/builtin/logical/pki/path_sign_issuers.go @@ -1,80 +1,28 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( - "net/http" - "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/logical" ) func pathIssuerSignIntermediate(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/sign-intermediate" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "sign", - OperationSuffix: "intermediate", - } - - return buildPathIssuerSignIntermediateRaw(b, pattern, displayAttrs) + return buildPathIssuerSignIntermediateRaw(b, pattern) } func pathSignIntermediate(b *backend) *framework.Path { pattern := "root/sign-intermediate" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIRoot, - OperationVerb: "sign", - OperationSuffix: "intermediate", - } - - return buildPathIssuerSignIntermediateRaw(b, pattern, displayAttrs) + return buildPathIssuerSignIntermediateRaw(b, pattern) } -func buildPathIssuerSignIntermediateRaw(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathIssuerSignIntermediateRaw(b *backend, pattern string) *framework.Path { fields := addIssuerRefField(map[string]*framework.FieldSchema{}) path := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, - Fields: fields, + Pattern: pattern, + Fields: fields, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathIssuerSignIntermediate, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "expiration": { - Type: framework.TypeInt64, - Description: `Expiration Time`, - Required: true, - }, - "serial_number": { - Type: framework.TypeString, - Description: `Serial Number`, - Required: false, - }, - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "issuing_ca": { - Type: framework.TypeString, - Description: `Issuing CA`, - Required: true, - }, - "ca_chain": { - Type: framework.TypeStringSlice, - Description: `CA Chain`, - Required: true, - }, - }, - }}, - }, }, }, @@ -165,29 +113,15 @@ See the API documentation for more information about required parameters. 
func pathIssuerSignSelfIssued(b *backend) *framework.Path { pattern := "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/sign-self-issued" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIIssuer, - OperationVerb: "sign", - OperationSuffix: "self-issued", - } - - return buildPathIssuerSignSelfIssued(b, pattern, displayAttrs) + return buildPathIssuerSignSelfIssued(b, pattern) } func pathSignSelfIssued(b *backend) *framework.Path { pattern := "root/sign-self-issued" - - displayAttrs := &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKIRoot, - OperationVerb: "sign", - OperationSuffix: "self-issued", - } - - return buildPathIssuerSignSelfIssued(b, pattern, displayAttrs) + return buildPathIssuerSignSelfIssued(b, pattern) } -func buildPathIssuerSignSelfIssued(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path { +func buildPathIssuerSignSelfIssued(b *backend, pattern string) *framework.Path { fields := map[string]*framework.FieldSchema{ "certificate": { Type: framework.TypeString, @@ -201,29 +135,11 @@ func buildPathIssuerSignSelfIssued(b *backend, pattern string, displayAttrs *fra } fields = addIssuerRefField(fields) path := &framework.Path{ - Pattern: pattern, - DisplayAttrs: displayAttrs, - Fields: fields, + Pattern: pattern, + Fields: fields, Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathIssuerSignSelfIssued, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "certificate": { - Type: framework.TypeString, - Description: `Certificate`, - Required: true, - }, - "issuing_ca": { - Type: framework.TypeString, - Description: `Issuing CA`, - Required: true, - }, - }, - }}, - }, }, }, diff --git a/builtin/logical/pki/path_tidy.go b/builtin/logical/pki/path_tidy.go index 22c406249c3c77..8bd22da10f6f1d 100644 --- a/builtin/logical/pki/path_tidy.go +++ b/builtin/logical/pki/path_tidy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -35,11 +32,8 @@ const ( type tidyStatus struct { // Parameters used to initiate the operation - safetyBuffer int - issuerSafetyBuffer int - revQueueSafetyBuffer int - acmeAccountSafetyBuffer int - + safetyBuffer int + issuerSafetyBuffer int tidyCertStore bool tidyRevokedCerts bool tidyRevokedAssocs bool @@ -47,7 +41,6 @@ type tidyStatus struct { tidyBackupBundle bool tidyRevocationQueue bool tidyCrossRevokedCerts bool - tidyAcme bool pauseDuration string // Status @@ -64,87 +57,47 @@ type tidyStatus struct { missingIssuerCertCount uint revQueueDeletedCount uint crossRevokedDeletedCount uint - - acmeAccountsCount uint - acmeAccountsRevokedCount uint - acmeAccountsDeletedCount uint - acmeOrdersDeletedCount uint } type tidyConfig struct { - // AutoTidy config - Enabled bool `json:"enabled"` - Interval time.Duration `json:"interval_duration"` - - // Tidy Operations - CertStore bool `json:"tidy_cert_store"` - RevokedCerts bool `json:"tidy_revoked_certs"` - IssuerAssocs bool `json:"tidy_revoked_cert_issuer_associations"` - ExpiredIssuers bool `json:"tidy_expired_issuers"` - BackupBundle bool `json:"tidy_move_legacy_ca_bundle"` - RevocationQueue bool `json:"tidy_revocation_queue"` - CrossRevokedCerts bool `json:"tidy_cross_cluster_revoked_certs"` - TidyAcme bool `json:"tidy_acme"` - - // Safety Buffers - SafetyBuffer time.Duration `json:"safety_buffer"` - IssuerSafetyBuffer time.Duration `json:"issuer_safety_buffer"` - QueueSafetyBuffer time.Duration `json:"revocation_queue_safety_buffer"` - AcmeAccountSafetyBuffer time.Duration `json:"acme_account_safety_buffer"` - PauseDuration time.Duration `json:"pause_duration"` - - // Metrics. - MaintainCount bool `json:"maintain_stored_certificate_counts"` - PublishMetrics bool `json:"publish_stored_certificate_count_metrics"` -} - -func (tc *tidyConfig) IsAnyTidyEnabled() bool { - return tc.CertStore || tc.RevokedCerts || tc.IssuerAssocs || tc.ExpiredIssuers || tc.BackupBundle || tc.TidyAcme || tc.CrossRevokedCerts || tc.RevocationQueue -} - -func (tc *tidyConfig) AnyTidyConfig() string { - return "tidy_cert_store / tidy_revoked_certs / tidy_revoked_cert_issuer_associations / tidy_expired_issuers / tidy_move_legacy_ca_bundle / tidy_revocation_queue / tidy_cross_cluster_revoked_certs / tidy_acme" + Enabled bool `json:"enabled"` + Interval time.Duration `json:"interval_duration"` + CertStore bool `json:"tidy_cert_store"` + RevokedCerts bool `json:"tidy_revoked_certs"` + IssuerAssocs bool `json:"tidy_revoked_cert_issuer_associations"` + ExpiredIssuers bool `json:"tidy_expired_issuers"` + BackupBundle bool `json:"tidy_move_legacy_ca_bundle"` + SafetyBuffer time.Duration `json:"safety_buffer"` + IssuerSafetyBuffer time.Duration `json:"issuer_safety_buffer"` + PauseDuration time.Duration `json:"pause_duration"` + RevocationQueue bool `json:"tidy_revocation_queue"` + QueueSafetyBuffer time.Duration `json:"revocation_queue_safety_buffer"` + CrossRevokedCerts bool `json:"tidy_cross_cluster_revoked_certs"` } var defaultTidyConfig = tidyConfig{ - Enabled: false, - Interval: 12 * time.Hour, - CertStore: false, - RevokedCerts: false, - IssuerAssocs: false, - ExpiredIssuers: false, - BackupBundle: false, - TidyAcme: false, - SafetyBuffer: 72 * time.Hour, - IssuerSafetyBuffer: 365 * 24 * time.Hour, - AcmeAccountSafetyBuffer: 30 * 24 * time.Hour, - PauseDuration: 0 * time.Second, - MaintainCount: false, - PublishMetrics: false, - RevocationQueue: false, - QueueSafetyBuffer: 48 * time.Hour, - 
CrossRevokedCerts: false, + Enabled: false, + Interval: 12 * time.Hour, + CertStore: false, + RevokedCerts: false, + IssuerAssocs: false, + ExpiredIssuers: false, + BackupBundle: false, + SafetyBuffer: 72 * time.Hour, + IssuerSafetyBuffer: 365 * 24 * time.Hour, + PauseDuration: 0 * time.Second, + RevocationQueue: false, + QueueSafetyBuffer: 48 * time.Hour, + CrossRevokedCerts: false, } func pathTidy(b *backend) *framework.Path { return &framework.Path{ Pattern: "tidy$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "tidy", - }, - - Fields: addTidyFields(map[string]*framework.FieldSchema{}), + Fields: addTidyFields(map[string]*framework.FieldSchema{}), Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathTidyWrite, - Responses: map[int][]framework.Response{ - http.StatusAccepted: {{ - Description: "Accepted", - Fields: map[string]*framework.FieldSchema{}, - }}, - }, + Callback: b.pathTidyWrite, ForwardPerformanceStandby: true, }, }, @@ -156,171 +109,9 @@ func pathTidy(b *backend) *framework.Path { func pathTidyCancel(b *backend) *framework.Path { return &framework.Path{ Pattern: "tidy-cancel$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "tidy", - OperationSuffix: "cancel", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathTidyCancelWrite, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer time duration`, - Required: false, - }, - "issuer_safety_buffer": { - Type: framework.TypeInt, - Description: `Issuer safety buffer`, - Required: false, - }, - "revocation_queue_safety_buffer": { - Type: framework.TypeInt, - Description: `Revocation queue safety buffer`, - Required: true, - }, - "tidy_cert_store": { - Type: framework.TypeBool, - Description: `Tidy certificate store`, - Required: false, - }, - "tidy_revoked_certs": { - Type: framework.TypeBool, - Description: `Tidy revoked certificates`, - Required: false, - }, - "tidy_revoked_cert_issuer_associations": { - Type: framework.TypeBool, - Description: `Tidy revoked certificate issuer associations`, - Required: false, - }, - "tidy_acme": { - Type: framework.TypeBool, - Description: `Tidy Unused Acme Accounts, and Orders`, - Required: false, - }, - "acme_account_safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer after creation after which accounts lacking orders are revoked`, - Required: false, - }, - "tidy_expired_issuers": { - Type: framework.TypeBool, - Description: `Tidy expired issuers`, - Required: false, - }, - "pause_duration": { - Type: framework.TypeString, - Description: `Duration to pause between tidying certificates`, - Required: false, - }, - "state": { - Type: framework.TypeString, - Description: `One of Inactive, Running, Finished, or Error`, - Required: false, - }, - "error": { - Type: framework.TypeString, - Description: `The error message`, - Required: false, - }, - "time_started": { - Type: framework.TypeString, - Description: `Time the operation started`, - Required: false, - }, - "time_finished": { - Type: framework.TypeString, - Description: `Time the operation finished`, - Required: false, - }, - "last_auto_tidy_finished": { - Type: framework.TypeString, - 
Description: `Time the last auto-tidy operation finished`, - Required: true, - }, - "message": { - Type: framework.TypeString, - Description: `Message of the operation`, - Required: false, - }, - "cert_store_deleted_count": { - Type: framework.TypeInt, - Description: `The number of certificate storage entries deleted`, - Required: false, - }, - "revoked_cert_deleted_count": { - Type: framework.TypeInt, - Description: `The number of revoked certificate entries deleted`, - Required: false, - }, - "current_cert_store_count": { - Type: framework.TypeInt, - Description: `The number of revoked certificate entries deleted`, - Required: false, - }, - "current_revoked_cert_count": { - Type: framework.TypeInt, - Description: `The number of revoked certificate entries deleted`, - Required: false, - }, - "missing_issuer_cert_count": { - Type: framework.TypeInt, - Required: false, - }, - "tidy_move_legacy_ca_bundle": { - Type: framework.TypeBool, - Required: false, - }, - "tidy_cross_cluster_revoked_certs": { - Type: framework.TypeBool, - Required: false, - }, - "tidy_revocation_queue": { - Type: framework.TypeBool, - Required: false, - }, - "revocation_queue_deleted_count": { - Type: framework.TypeInt, - Required: false, - }, - "cross_revoked_cert_deleted_count": { - Type: framework.TypeInt, - Required: false, - }, - "internal_backend_uuid": { - Type: framework.TypeString, - Required: false, - }, - "total_acme_account_count": { - Type: framework.TypeInt, - Description: `Total number of acme accounts iterated over`, - Required: false, - }, - "acme_account_deleted_count": { - Type: framework.TypeInt, - Description: `The number of revoked acme accounts removed`, - Required: false, - }, - "acme_account_revoked_count": { - Type: framework.TypeInt, - Description: `The number of unused acme accounts revoked`, - Required: false, - }, - "acme_orders_deleted_count": { - Type: framework.TypeInt, - Description: `The number of expired, unused acme orders removed`, - Required: false, - }, - }, - }}, - }, + Callback: b.pathTidyCancelWrite, ForwardPerformanceStandby: true, }, }, @@ -332,173 +123,9 @@ func pathTidyCancel(b *backend) *framework.Path { func pathTidyStatus(b *backend) *framework.Path { return &framework.Path{ Pattern: "tidy-status$", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - OperationVerb: "tidy", - OperationSuffix: "status", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathTidyStatusRead, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer time duration`, - Required: true, - }, - "issuer_safety_buffer": { - Type: framework.TypeInt, - Description: `Issuer safety buffer`, - Required: true, - }, - "revocation_queue_safety_buffer": { - Type: framework.TypeInt, - Description: `Revocation queue safety buffer`, - Required: true, - }, - "acme_account_safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer after creation after which accounts lacking orders are revoked`, - Required: false, - }, - "tidy_cert_store": { - Type: framework.TypeBool, - Description: `Tidy certificate store`, - Required: true, - }, - "tidy_revoked_certs": { - Type: framework.TypeBool, - Description: `Tidy revoked certificates`, - Required: true, - }, - "tidy_revoked_cert_issuer_associations": { - Type: framework.TypeBool, - 
Description: `Tidy revoked certificate issuer associations`, - Required: true, - }, - "tidy_expired_issuers": { - Type: framework.TypeBool, - Description: `Tidy expired issuers`, - Required: true, - }, - "tidy_cross_cluster_revoked_certs": { - Type: framework.TypeString, - Description: ``, - Required: false, - }, - "tidy_acme": { - Type: framework.TypeBool, - Description: `Tidy Unused Acme Accounts, and Orders`, - Required: true, - }, - "pause_duration": { - Type: framework.TypeString, - Description: `Duration to pause between tidying certificates`, - Required: true, - }, - "state": { - Type: framework.TypeString, - Description: `One of Inactive, Running, Finished, or Error`, - Required: true, - }, - "error": { - Type: framework.TypeString, - Description: `The error message`, - Required: true, - }, - "time_started": { - Type: framework.TypeString, - Description: `Time the operation started`, - Required: true, - }, - "time_finished": { - Type: framework.TypeString, - Description: `Time the operation finished`, - Required: false, - }, - "last_auto_tidy_finished": { - Type: framework.TypeString, - Description: `Time the last auto-tidy operation finished`, - Required: true, - }, - "message": { - Type: framework.TypeString, - Description: `Message of the operation`, - Required: true, - }, - "cert_store_deleted_count": { - Type: framework.TypeInt, - Description: `The number of certificate storage entries deleted`, - Required: true, - }, - "revoked_cert_deleted_count": { - Type: framework.TypeInt, - Description: `The number of revoked certificate entries deleted`, - Required: true, - }, - "current_cert_store_count": { - Type: framework.TypeInt, - Description: `The number of revoked certificate entries deleted`, - Required: true, - }, - "cross_revoked_cert_deleted_count": { - Type: framework.TypeInt, - Description: ``, - Required: true, - }, - "current_revoked_cert_count": { - Type: framework.TypeInt, - Description: `The number of revoked certificate entries deleted`, - Required: true, - }, - "revocation_queue_deleted_count": { - Type: framework.TypeInt, - Required: true, - }, - "tidy_move_legacy_ca_bundle": { - Type: framework.TypeBool, - Required: true, - }, - "tidy_revocation_queue": { - Type: framework.TypeBool, - Required: true, - }, - "missing_issuer_cert_count": { - Type: framework.TypeInt, - Required: true, - }, - "internal_backend_uuid": { - Type: framework.TypeString, - Required: true, - }, - "total_acme_account_count": { - Type: framework.TypeInt, - Description: `Total number of acme accounts iterated over`, - Required: false, - }, - "acme_account_deleted_count": { - Type: framework.TypeInt, - Description: `The number of revoked acme accounts removed`, - Required: false, - }, - "acme_account_revoked_count": { - Type: framework.TypeInt, - Description: `The number of unused acme accounts revoked`, - Required: false, - }, - "acme_orders_deleted_count": { - Type: framework.TypeInt, - Description: `The number of expired, unused acme orders removed`, - Required: false, - }, - }, - }}, - }, + Callback: b.pathTidyStatusRead, ForwardPerformanceStandby: true, }, }, @@ -510,9 +137,6 @@ func pathTidyStatus(b *backend) *framework.Path { func pathConfigAutoTidy(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/auto-tidy", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixPKI, - }, Fields: addTidyFields(map[string]*framework.FieldSchema{ "enabled": { Type: framework.TypeBool, @@ -523,207 +147,13 @@ func pathConfigAutoTidy(b *backend) *framework.Path { 
Description: `Interval at which to run an auto-tidy operation. This is the time between tidy invocations (after one finishes to the start of the next). Running a manual tidy will reset this duration.`, Default: int(defaultTidyConfig.Interval / time.Second), // TypeDurationSecond currently requires the default to be an int. }, - "maintain_stored_certificate_counts": { - Type: framework.TypeBool, - Description: `This configures whether stored certificates -are counted upon initialization of the backend, and whether during -normal operation, a running count of certificates stored is maintained.`, - Default: false, - }, - "publish_stored_certificate_count_metrics": { - Type: framework.TypeBool, - Description: `This configures whether the stored certificate -count is published to the metrics consumer. It does not affect if the -stored certificate count is maintained, and if maintained, it will be -available on the tidy-status endpoint.`, - Default: false, - }, }), Operations: map[logical.Operation]framework.OperationHandler{ logical.ReadOperation: &framework.PathOperation{ Callback: b.pathConfigAutoTidyRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "auto-tidy-configuration", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "enabled": { - Type: framework.TypeBool, - Description: `Specifies whether automatic tidy is enabled or not`, - Required: true, - }, - "interval_duration": { - Type: framework.TypeInt, - Description: `Specifies the duration between automatic tidy operation`, - Required: true, - }, - "tidy_cert_store": { - Type: framework.TypeBool, - Description: `Specifies whether to tidy up the certificate store`, - Required: true, - }, - "tidy_revoked_certs": { - Type: framework.TypeBool, - Description: `Specifies whether to remove all invalid and expired certificates from storage`, - Required: true, - }, - "tidy_revoked_cert_issuer_associations": { - Type: framework.TypeBool, - Description: `Specifies whether to associate revoked certificates with their corresponding issuers`, - Required: true, - }, - "tidy_expired_issuers": { - Type: framework.TypeBool, - Description: `Specifies whether tidy expired issuers`, - Required: true, - }, - "tidy_acme": { - Type: framework.TypeBool, - Description: `Tidy Unused Acme Accounts, and Orders`, - Required: true, - }, - "safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer time duration`, - Required: true, - }, - "issuer_safety_buffer": { - Type: framework.TypeInt, - Description: `Issuer safety buffer`, - Required: true, - }, - "acme_account_safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer after creation after which accounts lacking orders are revoked`, - Required: false, - }, - "pause_duration": { - Type: framework.TypeString, - Description: `Duration to pause between tidying certificates`, - Required: true, - }, - "tidy_move_legacy_ca_bundle": { - Type: framework.TypeBool, - Required: true, - }, - "tidy_cross_cluster_revoked_certs": { - Type: framework.TypeBool, - Required: true, - }, - "tidy_revocation_queue": { - Type: framework.TypeBool, - Required: true, - }, - "revocation_queue_safety_buffer": { - Type: framework.TypeDurationSecond, - Required: true, - }, - "publish_stored_certificate_count_metrics": { - Type: framework.TypeBool, - Required: true, - }, - "maintain_stored_certificate_counts": { - Type: framework.TypeBool, - Required: true, - }, - }, - }}, - }, }, logical.UpdateOperation: 
&framework.PathOperation{ Callback: b.pathConfigAutoTidyWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "auto-tidy", - }, - Responses: map[int][]framework.Response{ - http.StatusOK: {{ - Description: "OK", - Fields: map[string]*framework.FieldSchema{ - "enabled": { - Type: framework.TypeBool, - Description: `Specifies whether automatic tidy is enabled or not`, - Required: true, - }, - "interval_duration": { - Type: framework.TypeInt, - Description: `Specifies the duration between automatic tidy operation`, - Required: true, - }, - "tidy_cert_store": { - Type: framework.TypeBool, - Description: `Specifies whether to tidy up the certificate store`, - Required: true, - }, - "tidy_revoked_certs": { - Type: framework.TypeBool, - Description: `Specifies whether to remove all invalid and expired certificates from storage`, - Required: true, - }, - "tidy_revoked_cert_issuer_associations": { - Type: framework.TypeBool, - Description: `Specifies whether to associate revoked certificates with their corresponding issuers`, - Required: true, - }, - "tidy_expired_issuers": { - Type: framework.TypeBool, - Description: `Specifies whether tidy expired issuers`, - Required: true, - }, - "tidy_acme": { - Type: framework.TypeBool, - Description: `Tidy Unused Acme Accounts, and Orders`, - Required: true, - }, - "safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer time duration`, - Required: true, - }, - "issuer_safety_buffer": { - Type: framework.TypeInt, - Description: `Issuer safety buffer`, - Required: true, - }, - "acme_account_safety_buffer": { - Type: framework.TypeInt, - Description: `Safety buffer after creation after which accounts lacking orders are revoked`, - Required: true, - }, - "pause_duration": { - Type: framework.TypeString, - Description: `Duration to pause between tidying certificates`, - Required: true, - }, - "tidy_cross_cluster_revoked_certs": { - Type: framework.TypeBool, - Required: true, - }, - "tidy_revocation_queue": { - Type: framework.TypeBool, - Required: true, - }, - "tidy_move_legacy_ca_bundle": { - Type: framework.TypeBool, - Required: true, - }, - "revocation_queue_safety_buffer": { - Type: framework.TypeDurationSecond, - Required: true, - }, - "publish_stored_certificate_count_metrics": { - Type: framework.TypeBool, - Required: true, - }, - "maintain_stored_certificate_counts": { - Type: framework.TypeBool, - Required: true, - }, - }, - }}, - }, // Read more about why these flags are set in backend.go. 
ForwardPerformanceStandby: true, ForwardPerformanceSecondary: true, @@ -747,8 +177,6 @@ func (b *backend) pathTidyWrite(ctx context.Context, req *logical.Request, d *fr tidyRevocationQueue := d.Get("tidy_revocation_queue").(bool) queueSafetyBuffer := d.Get("revocation_queue_safety_buffer").(int) tidyCrossRevokedCerts := d.Get("tidy_cross_cluster_revoked_certs").(bool) - tidyAcme := d.Get("tidy_acme").(bool) - acmeAccountSafetyBuffer := d.Get("acme_account_safety_buffer").(int) if safetyBuffer < 1 { return logical.ErrorResponse("safety_buffer must be greater than zero"), nil @@ -762,10 +190,6 @@ func (b *backend) pathTidyWrite(ctx context.Context, req *logical.Request, d *fr return logical.ErrorResponse("revocation_queue_safety_buffer must be greater than zero"), nil } - if acmeAccountSafetyBuffer < 1 { - return logical.ErrorResponse("acme_account_safety_buffer must be greater than zero"), nil - } - if pauseDurationStr != "" { var err error pauseDuration, err = time.ParseDuration(pauseDurationStr) @@ -781,25 +205,22 @@ func (b *backend) pathTidyWrite(ctx context.Context, req *logical.Request, d *fr bufferDuration := time.Duration(safetyBuffer) * time.Second issuerBufferDuration := time.Duration(issuerSafetyBuffer) * time.Second queueSafetyBufferDuration := time.Duration(queueSafetyBuffer) * time.Second - acmeAccountSafetyBufferDuration := time.Duration(acmeAccountSafetyBuffer) * time.Second // Manual run with constructed configuration. config := &tidyConfig{ - Enabled: true, - Interval: 0 * time.Second, - CertStore: tidyCertStore, - RevokedCerts: tidyRevokedCerts, - IssuerAssocs: tidyRevokedAssocs, - ExpiredIssuers: tidyExpiredIssuers, - BackupBundle: tidyBackupBundle, - SafetyBuffer: bufferDuration, - IssuerSafetyBuffer: issuerBufferDuration, - PauseDuration: pauseDuration, - RevocationQueue: tidyRevocationQueue, - QueueSafetyBuffer: queueSafetyBufferDuration, - CrossRevokedCerts: tidyCrossRevokedCerts, - TidyAcme: tidyAcme, - AcmeAccountSafetyBuffer: acmeAccountSafetyBufferDuration, + Enabled: true, + Interval: 0 * time.Second, + CertStore: tidyCertStore, + RevokedCerts: tidyRevokedCerts, + IssuerAssocs: tidyRevokedAssocs, + ExpiredIssuers: tidyExpiredIssuers, + BackupBundle: tidyBackupBundle, + SafetyBuffer: bufferDuration, + IssuerSafetyBuffer: issuerBufferDuration, + PauseDuration: pauseDuration, + RevocationQueue: tidyRevocationQueue, + QueueSafetyBuffer: queueSafetyBufferDuration, + CrossRevokedCerts: tidyCrossRevokedCerts, } if !atomic.CompareAndSwapUint32(b.tidyCASGuard, 0, 1) { @@ -824,8 +245,8 @@ func (b *backend) pathTidyWrite(ctx context.Context, req *logical.Request, d *fr b.startTidyOperation(req, config) resp := &logical.Response{} - if !config.IsAnyTidyEnabled() { - resp.AddWarning("Manual tidy requested but no tidy operations were set. Enable at least one tidy operation to be run (" + config.AnyTidyConfig() + ").") + if !tidyCertStore && !tidyRevokedCerts && !tidyRevokedAssocs && !tidyExpiredIssuers && !tidyBackupBundle && !tidyRevocationQueue && !tidyCrossRevokedCerts { + resp.AddWarning("No targets to tidy; specify tidy_cert_store=true or tidy_revoked_certs=true or tidy_revoked_cert_issuer_associations=true or tidy_expired_issuers=true or tidy_move_legacy_ca_bundle=true or tidy_revocation_queue=true or tidy_cross_cluster_revoked_certs=true to start a tidy operation.") } else { resp.AddWarning("Tidy operation successfully started. 
Any information from the operation will be printed to Vault's server logs.") } @@ -915,17 +336,6 @@ func (b *backend) startTidyOperation(req *logical.Request, config *tidyConfig) { } } - // Check for cancel before continuing. - if atomic.CompareAndSwapUint32(b.tidyCancelCAS, 1, 0) { - return tidyCancelledError - } - - if config.TidyAcme { - if err := b.doTidyAcme(ctx, req, logger, config); err != nil { - return err - } - } - return nil } @@ -1148,17 +558,9 @@ func (b *backend) doTidyRevocationStore(ctx context.Context, req *logical.Reques } if !config.AutoRebuild { - warnings, err := b.crlBuilder.rebuild(sc, false) - if err != nil { + if err := b.crlBuilder.rebuild(sc, false); err != nil { return err } - if len(warnings) > 0 { - msg := "During rebuild of CRL for tidy, got the following warnings:" - for index, warning := range warnings { - msg = fmt.Sprintf("%v\n %d. %v", msg, index+1, warning) - } - b.Logger().Warn(msg) - } } } @@ -1260,17 +662,9 @@ func (b *backend) doTidyExpiredIssuers(ctx context.Context, req *logical.Request b.revokeStorageLock.Lock() defer b.revokeStorageLock.Unlock() - warnings, err := b.crlBuilder.rebuild(sc, false) - if err != nil { + if err := b.crlBuilder.rebuild(sc, false); err != nil { return err } - if len(warnings) > 0 { - msg := "During rebuild of CRL for tidy, got the following warnings:" - for index, warning := range warnings { - msg = fmt.Sprintf("%v\n %d. %v", msg, index+1, warning) - } - b.Logger().Warn(msg) - } } return nil @@ -1530,90 +924,6 @@ func (b *backend) doTidyCrossRevocationStore(ctx context.Context, req *logical.R return nil } -func (b *backend) doTidyAcme(ctx context.Context, req *logical.Request, logger hclog.Logger, config *tidyConfig) error { - b.acmeAccountLock.Lock() - defer b.acmeAccountLock.Unlock() - - sc := b.makeStorageContext(ctx, req.Storage) - thumbprints, err := sc.Storage.List(ctx, acmeThumbprintPrefix) - if err != nil { - return err - } - - b.tidyStatusLock.Lock() - b.tidyStatus.acmeAccountsCount = uint(len(thumbprints)) - b.tidyStatusLock.Unlock() - - baseUrl, _, err := getAcmeBaseUrl(sc, req) - if err != nil { - return err - } - - acmeCtx := &acmeContext{ - baseUrl: baseUrl, - sc: sc, - } - - for _, thumbprint := range thumbprints { - err := b.tidyAcmeAccountByThumbprint(b.acmeState, acmeCtx, thumbprint, config.SafetyBuffer, config.AcmeAccountSafetyBuffer) - if err != nil { - logger.Warn("error tidying account %v: %v", thumbprint, err.Error()) - } - - // Check for cancel before continuing. - if atomic.CompareAndSwapUint32(b.tidyCancelCAS, 1, 0) { - return tidyCancelledError - } - - // Check for pause duration to reduce resource consumption. - if config.PauseDuration > (0 * time.Second) { - b.acmeAccountLock.Unlock() // Correct the Lock - time.Sleep(config.PauseDuration) - b.acmeAccountLock.Lock() - } - - } - - // Clean up any unused EAB - eabIds, err := b.acmeState.ListEabIds(sc) - if err != nil { - return fmt.Errorf("failed listing EAB ids: %w", err) - } - - for _, eabId := range eabIds { - eab, err := b.acmeState.LoadEab(sc, eabId) - if err != nil { - if errors.Is(err, ErrStorageItemNotFound) { - // We don't need to worry about a consumed EAB - continue - } - return err - } - - eabExpiration := eab.CreatedOn.Add(config.AcmeAccountSafetyBuffer) - if time.Now().After(eabExpiration) { - _, err := b.acmeState.DeleteEab(sc, eabId) - if err != nil { - return fmt.Errorf("failed to tidy eab %s: %w", eabId, err) - } - } - - // Check for cancel before continuing. 
- if atomic.CompareAndSwapUint32(b.tidyCancelCAS, 1, 0) { - return tidyCancelledError - } - - // Check for pause duration to reduce resource consumption. - if config.PauseDuration > (0 * time.Second) { - b.acmeAccountLock.Unlock() // Correct the Lock - time.Sleep(config.PauseDuration) - b.acmeAccountLock.Lock() - } - } - - return nil -} - func (b *backend) pathTidyCancelWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) { if atomic.LoadUint32(b.tidyCASGuard) == 0 { resp := &logical.Response{} @@ -1652,7 +962,6 @@ func (b *backend) pathTidyStatusRead(_ context.Context, _ *logical.Request, _ *f "tidy_move_legacy_ca_bundle": nil, "tidy_revocation_queue": nil, "tidy_cross_cluster_revoked_certs": nil, - "tidy_acme": nil, "pause_duration": nil, "state": "Inactive", "error": nil, @@ -1664,31 +973,11 @@ func (b *backend) pathTidyStatusRead(_ context.Context, _ *logical.Request, _ *f "missing_issuer_cert_count": nil, "current_cert_store_count": nil, "current_revoked_cert_count": nil, - "internal_backend_uuid": nil, "revocation_queue_deleted_count": nil, "cross_revoked_cert_deleted_count": nil, - "total_acme_account_count": nil, - "acme_account_deleted_count": nil, - "acme_account_revoked_count": nil, - "acme_orders_deleted_count": nil, - "acme_account_safety_buffer": nil, }, } - resp.Data["internal_backend_uuid"] = b.backendUUID - - if b.certCountEnabled.Load() { - resp.Data["current_cert_store_count"] = b.certCount.Load() - resp.Data["current_revoked_cert_count"] = b.revokedCertCount.Load() - if !b.certsCounted.Load() { - resp.AddWarning("Certificates in storage are still being counted, current counts provided may be " + - "inaccurate") - } - if b.certCountError != "" { - resp.Data["certificate_counting_error"] = b.certCountError - } - } - if b.tidyStatus.state == tidyStatusInactive { return resp, nil } @@ -1702,7 +991,6 @@ func (b *backend) pathTidyStatusRead(_ context.Context, _ *logical.Request, _ *f resp.Data["tidy_move_legacy_ca_bundle"] = b.tidyStatus.tidyBackupBundle resp.Data["tidy_revocation_queue"] = b.tidyStatus.tidyRevocationQueue resp.Data["tidy_cross_cluster_revoked_certs"] = b.tidyStatus.tidyCrossRevokedCerts - resp.Data["tidy_acme"] = b.tidyStatus.tidyAcme resp.Data["pause_duration"] = b.tidyStatus.pauseDuration resp.Data["time_started"] = b.tidyStatus.timeStarted resp.Data["message"] = b.tidyStatus.message @@ -1711,13 +999,6 @@ func (b *backend) pathTidyStatusRead(_ context.Context, _ *logical.Request, _ *f resp.Data["missing_issuer_cert_count"] = b.tidyStatus.missingIssuerCertCount resp.Data["revocation_queue_deleted_count"] = b.tidyStatus.revQueueDeletedCount resp.Data["cross_revoked_cert_deleted_count"] = b.tidyStatus.crossRevokedDeletedCount - resp.Data["revocation_queue_safety_buffer"] = b.tidyStatus.revQueueSafetyBuffer - resp.Data["last_auto_tidy_finished"] = b.lastTidy - resp.Data["total_acme_account_count"] = b.tidyStatus.acmeAccountsCount - resp.Data["acme_account_deleted_count"] = b.tidyStatus.acmeAccountsDeletedCount - resp.Data["acme_account_revoked_count"] = b.tidyStatus.acmeAccountsRevokedCount - resp.Data["acme_orders_deleted_count"] = b.tidyStatus.acmeOrdersDeletedCount - resp.Data["acme_account_safety_buffer"] = b.tidyStatus.acmeAccountSafetyBuffer switch b.tidyStatus.state { case tidyStatusStarted: @@ -1739,6 +1020,14 @@ func (b *backend) pathTidyStatusRead(_ context.Context, _ *logical.Request, _ *f resp.Data["time_finished"] = b.tidyStatus.timeFinished } + resp.Data["current_cert_store_count"] = 
atomic.LoadUint32(b.certCount) + resp.Data["current_revoked_cert_count"] = atomic.LoadUint32(b.revokedCertCount) + + if !b.certsCounted.Load() { + resp.AddWarning("Certificates in storage are still being counted, current counts provided may be " + + "inaccurate") + } + return resp, nil } @@ -1750,7 +1039,21 @@ func (b *backend) pathConfigAutoTidyRead(ctx context.Context, req *logical.Reque } return &logical.Response{ - Data: getTidyConfigData(*config), + Data: map[string]interface{}{ + "enabled": config.Enabled, + "interval_duration": int(config.Interval / time.Second), + "tidy_cert_store": config.CertStore, + "tidy_revoked_certs": config.RevokedCerts, + "tidy_revoked_cert_issuer_associations": config.IssuerAssocs, + "tidy_expired_issuers": config.ExpiredIssuers, + "tidy_move_legacy_ca_bundle": config.BackupBundle, + "safety_buffer": int(config.SafetyBuffer / time.Second), + "issuer_safety_buffer": int(config.IssuerSafetyBuffer / time.Second), + "pause_duration": config.PauseDuration.String(), + "tidy_revocation_queue": config.RevocationQueue, + "revocation_queue_safety_buffer": int(config.QueueSafetyBuffer / time.Second), + "tidy_cross_cluster_revoked_certs": config.CrossRevokedCerts, + }, }, nil } @@ -1832,24 +1135,8 @@ func (b *backend) pathConfigAutoTidyWrite(ctx context.Context, req *logical.Requ config.CrossRevokedCerts = crossRevokedRaw.(bool) } - if tidyAcmeRaw, ok := d.GetOk("tidy_acme"); ok { - config.TidyAcme = tidyAcmeRaw.(bool) - } - - if config.Enabled && !config.IsAnyTidyEnabled() { - return logical.ErrorResponse("Auto-tidy enabled but no tidy operations were requested. Enable at least one tidy operation to be run (" + config.AnyTidyConfig() + ")."), nil - } - - if maintainCountEnabledRaw, ok := d.GetOk("maintain_stored_certificate_counts"); ok { - config.MaintainCount = maintainCountEnabledRaw.(bool) - } - - if runningStorageMetricsEnabledRaw, ok := d.GetOk("publish_stored_certificate_count_metrics"); ok { - config.PublishMetrics = runningStorageMetricsEnabledRaw.(bool) - } - - if config.PublishMetrics && !config.MaintainCount { - return logical.ErrorResponse("Can not publish a running storage metrics count to metrics without first maintaining that count. Enable `maintain_stored_certificate_counts` to enable `publish_stored_certificate_count_metrics`."), nil + if config.Enabled && !(config.CertStore || config.RevokedCerts || config.IssuerAssocs || config.ExpiredIssuers || config.BackupBundle || config.RevocationQueue || config.CrossRevokedCerts) { + return logical.ErrorResponse("Auto-tidy enabled but no tidy operations were requested. 
Enable at least one tidy operation to be run (tidy_cert_store / tidy_revoked_certs / tidy_revoked_cert_issuer_associations / tidy_expired_issuers / tidy_move_legacy_ca_bundle / tidy_revocation_queue / tidy_cross_cluster_revoked_certs)."), nil } if err := sc.writeAutoTidyConfig(config); err != nil { @@ -1857,7 +1144,21 @@ func (b *backend) pathConfigAutoTidyWrite(ctx context.Context, req *logical.Requ } return &logical.Response{ - Data: getTidyConfigData(*config), + Data: map[string]interface{}{ + "enabled": config.Enabled, + "interval_duration": int(config.Interval / time.Second), + "tidy_cert_store": config.CertStore, + "tidy_revoked_certs": config.RevokedCerts, + "tidy_revoked_cert_issuer_associations": config.IssuerAssocs, + "tidy_expired_issuers": config.ExpiredIssuers, + "tidy_move_legacy_ca_bundle": config.BackupBundle, + "safety_buffer": int(config.SafetyBuffer / time.Second), + "issuer_safety_buffer": int(config.IssuerSafetyBuffer / time.Second), + "pause_duration": config.PauseDuration.String(), + "tidy_revocation_queue": config.RevocationQueue, + "revocation_queue_safety_buffer": int(config.QueueSafetyBuffer / time.Second), + "tidy_cross_cluster_revoked_certs": config.CrossRevokedCerts, + }, }, nil } @@ -1866,19 +1167,16 @@ func (b *backend) tidyStatusStart(config *tidyConfig) { defer b.tidyStatusLock.Unlock() b.tidyStatus = &tidyStatus{ - safetyBuffer: int(config.SafetyBuffer / time.Second), - issuerSafetyBuffer: int(config.IssuerSafetyBuffer / time.Second), - revQueueSafetyBuffer: int(config.QueueSafetyBuffer / time.Second), - acmeAccountSafetyBuffer: int(config.AcmeAccountSafetyBuffer / time.Second), - tidyCertStore: config.CertStore, - tidyRevokedCerts: config.RevokedCerts, - tidyRevokedAssocs: config.IssuerAssocs, - tidyExpiredIssuers: config.ExpiredIssuers, - tidyBackupBundle: config.BackupBundle, - tidyRevocationQueue: config.RevocationQueue, - tidyCrossRevokedCerts: config.CrossRevokedCerts, - tidyAcme: config.TidyAcme, - pauseDuration: config.PauseDuration.String(), + safetyBuffer: int(config.SafetyBuffer / time.Second), + issuerSafetyBuffer: int(config.IssuerSafetyBuffer / time.Second), + tidyCertStore: config.CertStore, + tidyRevokedCerts: config.RevokedCerts, + tidyRevokedAssocs: config.IssuerAssocs, + tidyExpiredIssuers: config.ExpiredIssuers, + tidyBackupBundle: config.BackupBundle, + tidyRevocationQueue: config.RevocationQueue, + tidyCrossRevokedCerts: config.CrossRevokedCerts, + pauseDuration: config.PauseDuration.String(), state: tidyStatusStarted, timeStarted: time.Now(), @@ -1926,7 +1224,7 @@ func (b *backend) tidyStatusIncCertStoreCount() { b.tidyStatus.certStoreDeletedCount++ - b.ifCountEnabledDecrementTotalCertificatesCountReport() + b.decrementTotalCertificatesCountReport() } func (b *backend) tidyStatusIncRevokedCertCount() { @@ -1935,7 +1233,7 @@ func (b *backend) tidyStatusIncRevokedCertCount() { b.tidyStatus.revokedCertDeletedCount++ - b.ifCountEnabledDecrementTotalRevokedCertificatesCountReport() + b.decrementTotalRevokedCertificatesCountReport() } func (b *backend) tidyStatusIncMissingIssuerCertCount() { @@ -1959,27 +1257,6 @@ func (b *backend) tidyStatusIncCrossRevCertCount() { b.tidyStatus.crossRevokedDeletedCount++ } -func (b *backend) tidyStatusIncRevAcmeAccountCount() { - b.tidyStatusLock.Lock() - defer b.tidyStatusLock.Unlock() - - b.tidyStatus.acmeAccountsRevokedCount++ -} - -func (b *backend) tidyStatusIncDeletedAcmeAccountCount() { - b.tidyStatusLock.Lock() - defer b.tidyStatusLock.Unlock() - - b.tidyStatus.acmeAccountsDeletedCount++ -} - 
-func (b *backend) tidyStatusIncDelAcmeOrderCount() { - b.tidyStatusLock.Lock() - defer b.tidyStatusLock.Unlock() - - b.tidyStatus.acmeOrdersDeletedCount++ -} - const pathTidyHelpSyn = ` Tidy up the backend by removing expired certificates, revocation information, or both. @@ -2048,13 +1325,6 @@ The result includes the following fields: * 'revocation_queue_deleted_count': the number of revocation queue entries deleted * 'tidy_cross_cluster_revoked_certs': the value of this parameter when initiating the tidy operation * 'cross_revoked_cert_deleted_count': the number of cross-cluster revoked certificate entries deleted -* 'revocation_queue_safety_buffer': the value of this parameter when initiating the tidy operation -* 'tidy_acme': the value of this parameter when initiating the tidy operation -* 'acme_account_safety_buffer': the value of this parameter when initiating the tidy operation -* 'total_acme_account_count': the total number of acme accounts in the list to be iterated over -* 'acme_account_deleted_count': the number of revoked acme accounts deleted during the operation -* 'acme_account_revoked_count': the number of acme accounts revoked during the operation -* 'acme_orders_deleted_count': the number of acme orders deleted during the operation ` const pathConfigAutoTidySyn = ` @@ -2070,26 +1340,3 @@ controls the frequency of auto-tidy execution). Once enabled, a tidy operation will be kicked off automatically, as if it were executed with the posted configuration. ` - -func getTidyConfigData(config tidyConfig) map[string]interface{} { - return map[string]interface{}{ - // This map is in the same order as tidyConfig to ensure that all fields are accounted for - "enabled": config.Enabled, - "interval_duration": int(config.Interval / time.Second), - "tidy_cert_store": config.CertStore, - "tidy_revoked_certs": config.RevokedCerts, - "tidy_revoked_cert_issuer_associations": config.IssuerAssocs, - "tidy_expired_issuers": config.ExpiredIssuers, - "tidy_move_legacy_ca_bundle": config.BackupBundle, - "tidy_acme": config.TidyAcme, - "safety_buffer": int(config.SafetyBuffer / time.Second), - "issuer_safety_buffer": int(config.IssuerSafetyBuffer / time.Second), - "acme_account_safety_buffer": int(config.AcmeAccountSafetyBuffer / time.Second), - "pause_duration": config.PauseDuration.String(), - "publish_stored_certificate_count_metrics": config.PublishMetrics, - "maintain_stored_certificate_counts": config.MaintainCount, - "tidy_revocation_queue": config.RevocationQueue, - "revocation_queue_safety_buffer": int(config.QueueSafetyBuffer / time.Second), - "tidy_cross_cluster_revoked_certs": config.CrossRevokedCerts, - } -} diff --git a/builtin/logical/pki/path_tidy_test.go b/builtin/logical/pki/path_tidy_test.go index a5469ce003e586..45d3d3a6a5e7b2 100644 --- a/builtin/logical/pki/path_tidy_test.go +++ b/builtin/logical/pki/path_tidy_test.go @@ -1,21 +1,10 @@ -// Copyright (c) HashiCorp, Inc. 
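The help text above documents the manual tidy and tidy-status endpoints that remain after this backport (the ACME-related result fields are dropped). As a rough sketch under the same assumptions as the previous example (a "pki" mount and an already-configured *api.Client), a caller can start a manual tidy and then poll tidy-status until the backend reports a terminal state:

package tidyexample

import (
	"fmt"
	"time"

	vault "github.com/hashicorp/vault/api"
)

// runManualTidy kicks off a manual tidy on the "pki" mount and waits for the
// tidy-status state to reach a terminal value.
func runManualTidy(client *vault.Client) error {
	_, err := client.Logical().Write("pki/tidy", map[string]interface{}{
		"tidy_cert_store":    true,
		"tidy_revoked_certs": true,
		"safety_buffer":      "72h",
	})
	if err != nil {
		return err
	}

	for {
		status, err := client.Logical().Read("pki/tidy-status")
		if err != nil {
			return err
		}
		if status == nil || status.Data == nil {
			return fmt.Errorf("no data returned from tidy-status")
		}
		state, _ := status.Data["state"].(string)
		if state == "Finished" || state == "Error" || state == "Cancelled" {
			fmt.Printf("tidy done, status: %#v\n", status.Data)
			return nil
		}
		time.Sleep(1 * time.Second)
	}
}

The polling pattern mirrors the loop in the deleted TestCertStorageMetrics below, which waits for a tidy run whose state reports Finished before checking the resulting counts.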
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( "encoding/json" - "errors" - "fmt" - "strings" "testing" "time" - "github.com/hashicorp/vault/helper/testhelpers" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" - - "github.com/armon/go-metrics" - "github.com/hashicorp/vault/api" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/logical" @@ -24,41 +13,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestTidyConfigs(t *testing.T) { - t.Parallel() - - var cfg tidyConfig - operations := strings.Split(cfg.AnyTidyConfig(), " / ") - t.Logf("Got tidy operations: %v", operations) - - for _, operation := range operations { - b, s := CreateBackendWithStorage(t) - - resp, err := CBWrite(b, s, "config/auto-tidy", map[string]interface{}{ - "enabled": true, - operation: true, - }) - requireSuccessNonNilResponse(t, resp, err, "expected to be able to enable auto-tidy operation "+operation) - - resp, err = CBRead(b, s, "config/auto-tidy") - requireSuccessNonNilResponse(t, resp, err, "expected to be able to read auto-tidy operation for operation "+operation) - require.True(t, resp.Data[operation].(bool), "expected operation to be enabled after reading auto-tidy config "+operation) - - resp, err = CBWrite(b, s, "tidy", map[string]interface{}{ - operation: true, - }) - requireSuccessNonNilResponse(t, resp, err, "expected to be able to start tidy operation with "+operation) - if len(resp.Warnings) > 0 { - t.Logf("got warnings while starting manual tidy: %v", resp.Warnings) - for _, warning := range resp.Warnings { - if strings.Contains(warning, "Manual tidy requested but no tidy operations were set.") { - t.Fatalf("expected to be able to enable tidy operation with just %v but got warning: %v / (resp=%v)", operation, warning, resp) - } - } - } - } -} - func TestAutoTidy(t *testing.T) { t.Parallel() @@ -264,19 +218,17 @@ func TestTidyCancellation(t *testing.T) { // Kick off a tidy operation (which runs in the background), but with // a slow-ish pause between certificates. - resp, err := CBWrite(b, s, "tidy", map[string]interface{}{ + _, err = CBWrite(b, s, "tidy", map[string]interface{}{ "tidy_cert_store": true, "safety_buffer": "1s", "pause_duration": "1s", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("tidy"), logical.UpdateOperation), resp, true) // If we wait six seconds, the operation should still be running. That's // how we check that pause_duration works. time.Sleep(3 * time.Second) - resp, err = CBRead(b, s, "tidy-status") - + resp, err := CBRead(b, s, "tidy-status") require.NoError(t, err) require.NotNil(t, resp) require.NotNil(t, resp.Data) @@ -284,7 +236,6 @@ func TestTidyCancellation(t *testing.T) { // If we now cancel the operation, the response should say Cancelling. 
cancelResp, err := CBWrite(b, s, "tidy-cancel", map[string]interface{}{}) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("tidy-cancel"), logical.UpdateOperation), resp, true) require.NoError(t, err) require.NotNil(t, cancelResp) require.NotNil(t, cancelResp.Data) @@ -304,7 +255,6 @@ func TestTidyCancellation(t *testing.T) { time.Sleep(3 * time.Second) statusResp, err := CBRead(b, s, "tidy-status") - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("tidy-status"), logical.ReadOperation), resp, true) require.NoError(t, err) require.NotNil(t, statusResp) require.NotNil(t, statusResp.Data) @@ -429,7 +379,6 @@ func TestTidyIssuerConfig(t *testing.T) { // Ensure the default auto-tidy config matches expectations resp, err := CBRead(b, s, "config/auto-tidy") - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/auto-tidy"), logical.ReadOperation), resp, true) requireSuccessNonNilResponse(t, resp, err) jsonBlob, err := json.Marshal(&defaultTidyConfig) @@ -444,7 +393,6 @@ func TestTidyIssuerConfig(t *testing.T) { defaultConfigMap["safety_buffer"] = int(time.Duration(defaultConfigMap["safety_buffer"].(float64)) / time.Second) defaultConfigMap["pause_duration"] = time.Duration(defaultConfigMap["pause_duration"].(float64)).String() defaultConfigMap["revocation_queue_safety_buffer"] = int(time.Duration(defaultConfigMap["revocation_queue_safety_buffer"].(float64)) / time.Second) - defaultConfigMap["acme_account_safety_buffer"] = int(time.Duration(defaultConfigMap["acme_account_safety_buffer"].(float64)) / time.Second) require.Equal(t, defaultConfigMap, resp.Data) @@ -453,344 +401,7 @@ func TestTidyIssuerConfig(t *testing.T) { "tidy_expired_issuers": true, "issuer_safety_buffer": "5s", }) - schema.ValidateResponse(t, schema.GetResponseSchema(t, b.Route("config/auto-tidy"), logical.UpdateOperation), resp, true) - requireSuccessNonNilResponse(t, resp, err) require.Equal(t, true, resp.Data["tidy_expired_issuers"]) require.Equal(t, 5, resp.Data["issuer_safety_buffer"]) } - -// TestCertStorageMetrics ensures that when enabled, metrics are able to count the number of certificates in storage and -// number of revoked certificates in storage. Moreover, this test ensures that the gauge is emitted periodically, so -// that the metric does not disappear or go stale. -func TestCertStorageMetrics(t *testing.T) { - // This tests uses the same setup as TestAutoTidy - newPeriod := 1 * time.Second - - // We set up a metrics accumulator - inmemSink := metrics.NewInmemSink( - 2*newPeriod, // A short time period is ideal here to test metrics are emitted every periodic func - 10*newPeriod) // Do not keep a huge amount of metrics in the sink forever, clear them out to save memory usage. - - metricsConf := metrics.DefaultConfig("") - metricsConf.EnableHostname = false - metricsConf.EnableHostnameLabel = false - metricsConf.EnableServiceLabel = false - metricsConf.EnableTypePrefix = false - - _, err := metrics.NewGlobal(metricsConf, inmemSink) - if err != nil { - t.Fatal(err) - } - - // This test requires the periodicFunc to trigger, which requires we stand - // up a full test cluster. - coreConfig := &vault.CoreConfig{ - LogicalBackends: map[string]logical.Factory{ - "pki": Factory, - }, - // See notes below about usage of /sys/raw for reading cluster - // storage without barrier encryption. 
- EnableRaw: true, - RollbackPeriod: newPeriod, - } - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - defer cluster.Cleanup() - client := cluster.Cores[0].Client - - // Mount PKI - err = client.Sys().Mount("pki", &api.MountInput{ - Type: "pki", - Config: api.MountConfigInput{ - DefaultLeaseTTL: "10m", - MaxLeaseTTL: "60m", - }, - }) - require.NoError(t, err) - - // Generate root. - resp, err := client.Logical().Write("pki/root/generate/internal", map[string]interface{}{ - "ttl": "40h", - "common_name": "Root X1", - "key_type": "ec", - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotEmpty(t, resp.Data) - require.NotEmpty(t, resp.Data["issuer_id"]) - - // Set up a testing role. - _, err = client.Logical().Write("pki/roles/local-testing", map[string]interface{}{ - "allow_any_name": true, - "enforce_hostnames": false, - "key_type": "ec", - }) - require.NoError(t, err) - - // Run tidy so that tidy-status is not empty - _, err = client.Logical().Write("pki/tidy", map[string]interface{}{ - "tidy_revoked_certs": true, - }) - require.NoError(t, err) - - // Since certificate counts are off by default, we shouldn't see counts in the tidy status - tidyStatus, err := client.Logical().Read("pki/tidy-status") - if err != nil { - t.Fatal(err) - } - // backendUUID should exist, we need this for metrics - backendUUID := tidyStatus.Data["internal_backend_uuid"].(string) - // "current_cert_store_count", "current_revoked_cert_count" - countData, ok := tidyStatus.Data["current_cert_store_count"] - if ok && countData != nil { - t.Fatalf("Certificate counting should be off by default, but current cert store count %v appeared in tidy status in unconfigured mount", countData) - } - revokedCountData, ok := tidyStatus.Data["current_revoked_cert_count"] - if ok && revokedCountData != nil { - t.Fatalf("Certificate counting should be off by default, but revoked cert count %v appeared in tidy status in unconfigured mount", revokedCountData) - } - - // Since certificate counts are off by default, those metrics should not exist yet - mostRecentInterval := inmemSink.Data()[len(inmemSink.Data())-1] - _, ok = mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_revoked_certificates_stored"] - if ok { - t.Fatalf("Certificate counting should be off by default, but revoked cert count was emitted as a metric in an unconfigured mount") - } - _, ok = mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_certificates_stored"] - if ok { - t.Fatalf("Certificate counting should be off by default, but total certificate count was emitted as a metric in an unconfigured mount") - } - - // Write the auto-tidy config. 
- _, err = client.Logical().Write("pki/config/auto-tidy", map[string]interface{}{ - "enabled": true, - "interval_duration": "1s", - "tidy_cert_store": true, - "tidy_revoked_certs": true, - "safety_buffer": "1s", - "maintain_stored_certificate_counts": true, - "publish_stored_certificate_count_metrics": false, - }) - require.NoError(t, err) - - // Reload the Mount - Otherwise Stored Certificate Counts Will Not Be Populated - _, err = client.Logical().Write("/sys/plugins/reload/backend", map[string]interface{}{ - "plugin": "pki", - }) - - // By reading the auto-tidy endpoint, we ensure that initialize has completed (which has a write lock on auto-tidy) - _, err = client.Logical().Read("/pki/config/auto-tidy") - if err != nil { - t.Fatal(err) - } - - // Since publish_stored_certificate_count_metrics is still false, these metrics should still not exist yet - mostRecentInterval = inmemSink.Data()[len(inmemSink.Data())-1] - _, ok = mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_revoked_certificates_stored"] - if ok { - t.Fatalf("Certificate counting should be off by default, but revoked cert count was emitted as a metric in an unconfigured mount") - } - _, ok = mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_certificates_stored"] - if ok { - t.Fatalf("Certificate counting should be off by default, but total certificate count was emitted as a metric in an unconfigured mount") - } - - // But since certificate counting is on, the metrics should exist on tidyStatus endpoint: - tidyStatus, err = client.Logical().Read("pki/tidy-status") - if err != nil { - t.Fatal(err) - } - // backendUUID should exist, we need this for metrics - backendUUID = tidyStatus.Data["internal_backend_uuid"].(string) - // "current_cert_store_count", "current_revoked_cert_count" - certStoreCount, ok := tidyStatus.Data["current_cert_store_count"] - if !ok { - t.Fatalf("Certificate counting has been turned on, but current cert store count does not appear in tidy status") - } - if certStoreCount != json.Number("1") { - t.Fatalf("Only created one certificate, but a got a certificate count of %v", certStoreCount) - } - revokedCertCount, ok := tidyStatus.Data["current_revoked_cert_count"] - if !ok { - t.Fatalf("Certificate counting has been turned on, but revoked cert store count does not appear in tidy status") - } - if revokedCertCount != json.Number("0") { - t.Fatalf("Have not yet revoked a certificate, but got a revoked cert store count of %v", revokedCertCount) - } - - // Write the auto-tidy config, again, this time turning on metrics - _, err = client.Logical().Write("pki/config/auto-tidy", map[string]interface{}{ - "enabled": true, - "interval_duration": "1s", - "tidy_cert_store": true, - "tidy_revoked_certs": true, - "safety_buffer": "1s", - "maintain_stored_certificate_counts": true, - "publish_stored_certificate_count_metrics": true, - }) - require.NoError(t, err) - - // Issue a cert and revoke it. 
- resp, err = client.Logical().Write("pki/issue/local-testing", map[string]interface{}{ - "common_name": "example.com", - "ttl": "10s", - }) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["serial_number"]) - require.NotEmpty(t, resp.Data["certificate"]) - leafSerial := resp.Data["serial_number"].(string) - leafCert := parseCert(t, resp.Data["certificate"].(string)) - - // Read cert before revoking - resp, err = client.Logical().Read("pki/cert/" + leafSerial) - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["certificate"]) - revocationTime, err := (resp.Data["revocation_time"].(json.Number)).Int64() - require.Equal(t, int64(0), revocationTime, "revocation time was not zero") - require.Empty(t, resp.Data["revocation_time_rfc3339"], "revocation_time_rfc3339 was not empty") - require.Empty(t, resp.Data["issuer_id"], "issuer_id was not empty") - - _, err = client.Logical().Write("pki/revoke", map[string]interface{}{ - "serial_number": leafSerial, - }) - require.NoError(t, err) - - // We read the auto-tidy endpoint again, to ensure any metrics logic has completed (lock on config) - _, err = client.Logical().Read("/pki/config/auto-tidy") - if err != nil { - t.Fatal(err) - } - - // Check Metrics After Cert Has Be Created and Revoked - tidyStatus, err = client.Logical().Read("pki/tidy-status") - if err != nil { - t.Fatal(err) - } - backendUUID = tidyStatus.Data["internal_backend_uuid"].(string) - certStoreCount, ok = tidyStatus.Data["current_cert_store_count"] - if !ok { - t.Fatalf("Certificate counting has been turned on, but current cert store count does not appear in tidy status") - } - if certStoreCount != json.Number("2") { - t.Fatalf("Created root and leaf certificate, but a got a certificate count of %v", certStoreCount) - } - revokedCertCount, ok = tidyStatus.Data["current_revoked_cert_count"] - if !ok { - t.Fatalf("Certificate counting has been turned on, but revoked cert store count does not appear in tidy status") - } - if revokedCertCount != json.Number("1") { - t.Fatalf("Revoked one certificate, but got a revoked cert store count of %v", revokedCertCount) - } - // This should now be initialized - certCountError, ok := tidyStatus.Data["certificate_counting_error"] - if ok && certCountError.(string) != "" { - t.Fatalf("Expected certificate count error to disappear after initialization, but got error %v", certCountError) - } - - testhelpers.RetryUntil(t, newPeriod*5, func() error { - mostRecentInterval = inmemSink.Data()[len(inmemSink.Data())-1] - revokedCertCountGaugeValue, ok := mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_revoked_certificates_stored"] - if !ok { - return errors.New("turned on metrics, but revoked cert count was not emitted") - } - if revokedCertCountGaugeValue.Value != 1 { - return fmt.Errorf("revoked one certificate, but metrics emitted a revoked cert store count of %v", revokedCertCountGaugeValue) - } - certStoreCountGaugeValue, ok := mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_certificates_stored"] - if !ok { - return errors.New("turned on metrics, but total certificate count was not emitted") - } - if certStoreCountGaugeValue.Value != 2 { - return fmt.Errorf("stored two certificiates, but total certificate count emitted was %v", certStoreCountGaugeValue.Value) - } - return nil - }) - - // Wait for cert to expire and the safety buffer to elapse. 
- time.Sleep(time.Until(leafCert.NotAfter) + 3*time.Second) - - // Wait for auto-tidy to run afterwards. - var foundTidyRunning string - var foundTidyFinished bool - timeoutChan := time.After(120 * time.Second) - for { - if foundTidyRunning != "" && foundTidyFinished { - break - } - - select { - case <-timeoutChan: - t.Fatalf("expected auto-tidy to run (%v) and finish (%v) before 120 seconds elapsed", foundTidyRunning, foundTidyFinished) - default: - time.Sleep(250 * time.Millisecond) - - resp, err = client.Logical().Read("pki/tidy-status") - require.NoError(t, err) - require.NotNil(t, resp) - require.NotNil(t, resp.Data) - require.NotEmpty(t, resp.Data["state"]) - require.NotEmpty(t, resp.Data["time_started"]) - state := resp.Data["state"].(string) - started := resp.Data["time_started"].(string) - t.Logf("Resp: %v", resp.Data) - - // We want the _next_ tidy run after the cert expires. This - // means if we're currently finished when we hit this the - // first time, we want to wait for the next run. - if foundTidyRunning == "" { - foundTidyRunning = started - } else if foundTidyRunning != started && !foundTidyFinished && state == "Finished" { - foundTidyFinished = true - } - } - } - - // After Tidy, Cert Store Count Should Still Be Available, and Be Updated: - // Check Metrics After Cert Has Be Created and Revoked - tidyStatus, err = client.Logical().Read("pki/tidy-status") - if err != nil { - t.Fatal(err) - } - backendUUID = tidyStatus.Data["internal_backend_uuid"].(string) - // "current_cert_store_count", "current_revoked_cert_count" - certStoreCount, ok = tidyStatus.Data["current_cert_store_count"] - if !ok { - t.Fatalf("Certificate counting has been turned on, but current cert store count does not appear in tidy status") - } - if certStoreCount != json.Number("1") { - t.Fatalf("Created root and leaf certificate, deleted leaf, but a got a certificate count of %v", certStoreCount) - } - revokedCertCount, ok = tidyStatus.Data["current_revoked_cert_count"] - if !ok { - t.Fatalf("Certificate counting has been turned on, but revoked cert store count does not appear in tidy status") - } - if revokedCertCount != json.Number("0") { - t.Fatalf("Revoked certificate has been tidied, but got a revoked cert store count of %v", revokedCertCount) - } - - testhelpers.RetryUntil(t, newPeriod*5, func() error { - mostRecentInterval = inmemSink.Data()[len(inmemSink.Data())-1] - revokedCertCountGaugeValue, ok := mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_revoked_certificates_stored"] - if !ok { - return errors.New("turned on metrics, but revoked cert count was not emitted") - } - if revokedCertCountGaugeValue.Value != 0 { - return fmt.Errorf("revoked certificate has been tidied, but metrics emitted a revoked cert store count of %v", revokedCertCountGaugeValue) - } - certStoreCountGaugeValue, ok := mostRecentInterval.Gauges["secrets.pki."+backendUUID+".total_certificates_stored"] - if !ok { - return errors.New("turned on metrics, but total certificate count was not emitted") - } - if certStoreCountGaugeValue.Value != 1 { - return fmt.Errorf("only one of two certificates left after tidy, but total certificate count emitted was %v", certStoreCountGaugeValue.Value) - } - return nil - }) -} diff --git a/builtin/logical/pki/periodic.go b/builtin/logical/pki/periodic.go index 77ff31212ef96f..452ef9dcbf6120 100644 --- a/builtin/logical/pki/periodic.go +++ b/builtin/logical/pki/periodic.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/secret_certs.go b/builtin/logical/pki/secret_certs.go index 11ebcd2ac7cb3a..00c7339139905d 100644 --- a/builtin/logical/pki/secret_certs.go +++ b/builtin/logical/pki/secret_certs.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/storage.go b/builtin/logical/pki/storage.go index 52b0faf2ca3b67..45fbf017fba09b 100644 --- a/builtin/logical/pki/storage.go +++ b/builtin/logical/pki/storage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -8,7 +5,6 @@ import ( "context" "crypto" "crypto/x509" - "errors" "fmt" "sort" "strings" @@ -21,8 +17,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -var ErrStorageItemNotFound = errors.New("storage item not found") - const ( storageKeyConfig = "config/keys" storageIssuerConfig = "config/issuers" @@ -287,15 +281,6 @@ func (b *backend) makeStorageContext(ctx context.Context, s logical.Storage) *st } } -func (sc *storageContext) WithFreshTimeout(timeout time.Duration) (*storageContext, context.CancelFunc) { - ctx, cancel := context.WithTimeout(context.Background(), timeout) - return &storageContext{ - Context: ctx, - Storage: sc.Storage, - Backend: sc.Backend, - }, cancel -} - func (sc *storageContext) listKeys() ([]keyID, error) { strList, err := sc.Storage.List(sc.Context, keyPrefix) if err != nil { @@ -1325,10 +1310,9 @@ func (sc *storageContext) getRevocationConfig() (*crlConfig, error) { result.Expiry = defaultCrlConfig.Expiry } - isLocalMount := sc.Backend.System().LocalMount() - if (!constants.IsEnterprise || isLocalMount) && (result.UnifiedCRLOnExistingPaths || result.UnifiedCRL || result.UseGlobalQueue) { + if !constants.IsEnterprise && (result.UnifiedCRLOnExistingPaths || result.UnifiedCRL || result.UseGlobalQueue) { // An end user must have had Enterprise, enabled the unified config args and then downgraded to OSS. 
- sc.Backend.Logger().Warn("Not running Vault Enterprise or using a local mount, " + + sc.Backend.Logger().Warn("Not running Vault Enterprise, " + "disabling unified_crl, unified_crl_on_existing_paths and cross_cluster_revocation config flags.") result.UnifiedCRLOnExistingPaths = false result.UnifiedCRL = false @@ -1367,33 +1351,7 @@ func (sc *storageContext) writeAutoTidyConfig(config *tidyConfig) error { return err } - err = sc.Storage.Put(sc.Context, entry) - if err != nil { - return err - } - - sc.Backend.publishCertCountMetrics.Store(config.PublishMetrics) - - // To Potentially Disable Certificate Counting - if config.MaintainCount == false { - certCountWasEnabled := sc.Backend.certCountEnabled.Swap(config.MaintainCount) - if certCountWasEnabled { - sc.Backend.certsCounted.Store(true) - sc.Backend.certCountError = "Cert Count is Disabled: enable via Tidy Config maintain_stored_certificate_counts" - sc.Backend.possibleDoubleCountedSerials = nil // This won't stop a list operation, but will stop an expensive clean-up during initialize - sc.Backend.possibleDoubleCountedRevokedSerials = nil // This won't stop a list operation, but will stop an expensive clean-up during initialize - sc.Backend.certCount.Store(0) - sc.Backend.revokedCertCount.Store(0) - } - } else { // To Potentially Enable Certificate Counting - if sc.Backend.certCountEnabled.Load() == false { - // We haven't written "re-enable certificate counts" outside the initialize function - // Any call derived call to do so is likely to time out on ~2 million certs - sc.Backend.certCountError = "Certificate Counting Has Not Been Initialized, re-initialize this mount" - } - } - - return nil + return sc.Storage.Put(sc.Context, entry) } func (sc *storageContext) listRevokedCerts() ([]string, error) { @@ -1441,7 +1399,7 @@ func (sc *storageContext) fetchRevocationInfo(serial string) (*revocationInfo, e if revEntry != nil { err = revEntry.DecodeJSON(&revInfo) if err != nil { - return nil, fmt.Errorf("error decoding existing revocation info: %w", err) + return nil, fmt.Errorf("error decoding existing revocation info") } } diff --git a/builtin/logical/pki/storage_migrations.go b/builtin/logical/pki/storage_migrations.go index f4b9237266b763..9104e5c6f3afb9 100644 --- a/builtin/logical/pki/storage_migrations.go +++ b/builtin/logical/pki/storage_migrations.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/storage_migrations_test.go b/builtin/logical/pki/storage_migrations_test.go index 754f3993d14b20..b603321dd03e30 100644 --- a/builtin/logical/pki/storage_migrations_test.go +++ b/builtin/logical/pki/storage_migrations_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/storage_test.go b/builtin/logical/pki/storage_test.go index 625c046d00c86e..17760653b7b377 100644 --- a/builtin/logical/pki/storage_test.go +++ b/builtin/logical/pki/storage_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/storage_unified.go b/builtin/logical/pki/storage_unified.go index 28c656bb8bb65f..6c426a3bade5bf 100644 --- a/builtin/logical/pki/storage_unified.go +++ b/builtin/logical/pki/storage_unified.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pki/test_helpers.go b/builtin/logical/pki/test_helpers.go index 0a003438ba8a25..491575138a663d 100644 --- a/builtin/logical/pki/test_helpers.go +++ b/builtin/logical/pki/test_helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( @@ -66,21 +63,6 @@ func requireSignedBy(t *testing.T, cert *x509.Certificate, signingCert *x509.Cer } } -func requireSignedByAtPath(t *testing.T, client *api.Client, leaf *x509.Certificate, path string) { - t.Helper() - - resp, err := client.Logical().Read(path) - require.NoError(t, err, "got unexpected error fetching parent certificate") - require.NotNil(t, resp, "missing response when fetching parent certificate") - require.NotNil(t, resp.Data, "missing data from parent certificate response") - require.NotNil(t, resp.Data["certificate"], "missing certificate field on parent read response") - - parentCert := resp.Data["certificate"].(string) - parent := parseCert(t, parentCert) - - requireSignedBy(t, leaf, parent) -} - // Certificate helper func parseCert(t *testing.T, pemCert string) *x509.Certificate { t.Helper() @@ -226,10 +208,6 @@ func CBReq(b *backend, s logical.Storage, operation logical.Operation, path stri return resp, nil } -func CBHeader(b *backend, s logical.Storage, path string) (*logical.Response, error) { - return CBReq(b, s, logical.HeaderOperation, path, make(map[string]interface{})) -} - func CBRead(b *backend, s logical.Storage, path string) (*logical.Response, error) { return CBReq(b, s, logical.ReadOperation, path, make(map[string]interface{})) } diff --git a/builtin/logical/pki/util.go b/builtin/logical/pki/util.go index d90e055e6cbcbb..d28ba60e2b2552 100644 --- a/builtin/logical/pki/util.go +++ b/builtin/logical/pki/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pki import ( diff --git a/builtin/logical/pkiext/nginx_test.go b/builtin/logical/pkiext/nginx_test.go index 1532f3e81f6ca3..9992627c844882 100644 --- a/builtin/logical/pkiext/nginx_test.go +++ b/builtin/logical/pkiext/nginx_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pkiext import ( @@ -17,9 +14,11 @@ import ( "testing" "time" - "github.com/hashicorp/go-uuid" "github.com/hashicorp/vault/builtin/logical/pki" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" + + "github.com/hashicorp/go-uuid" + "github.com/stretchr/testify/require" ) @@ -230,7 +229,7 @@ func CheckWithClients(t *testing.T, network string, address string, url string, // Start our service with a random name to not conflict with other // threads. 
ctx := context.Background() - result, err := cwRunner.Start(ctx, true, false) + ctr, _, _, err := cwRunner.Start(ctx, true, false) if err != nil { t.Fatalf("Could not start golang container for wget/curl checks: %s", err) } @@ -256,14 +255,14 @@ func CheckWithClients(t *testing.T, network string, address string, url string, wgetCmd = []string{"wget", "--verbose", "--ca-certificate=/root.pem", "--certificate=/client-cert.pem", "--private-key=/client-privkey.pem", url} curlCmd = []string{"curl", "--verbose", "--cacert", "/root.pem", "--cert", "/client-cert.pem", "--key", "/client-privkey.pem", url} } - if err := cwRunner.CopyTo(result.Container.ID, "/", certCtx); err != nil { + if err := cwRunner.CopyTo(ctr.ID, "/", certCtx); err != nil { t.Fatalf("Could not copy certificate and key into container: %v", err) } for _, cmd := range [][]string{hostPrimeCmd, wgetCmd, curlCmd} { t.Logf("Running client connection command: %v", cmd) - stdout, stderr, retcode, err := cwRunner.RunCmdWithOutput(ctx, result.Container.ID, cmd) + stdout, stderr, retcode, err := cwRunner.RunCmdWithOutput(ctx, ctr.ID, cmd) if err != nil { t.Fatalf("Could not run command (%v) in container: %v", cmd, err) } @@ -293,7 +292,7 @@ func CheckDeltaCRL(t *testing.T, network string, address string, url string, roo // Start our service with a random name to not conflict with other // threads. ctx := context.Background() - result, err := cwRunner.Start(ctx, true, false) + ctr, _, _, err := cwRunner.Start(ctx, true, false) if err != nil { t.Fatalf("Could not start golang container for wget2 delta CRL checks: %s", err) } @@ -311,14 +310,14 @@ func CheckDeltaCRL(t *testing.T, network string, address string, url string, roo certCtx := docker.NewBuildContext() certCtx["root.pem"] = docker.PathContentsFromString(rootCert) certCtx["crls.pem"] = docker.PathContentsFromString(crls) - if err := cwRunner.CopyTo(result.Container.ID, "/", certCtx); err != nil { + if err := cwRunner.CopyTo(ctr.ID, "/", certCtx); err != nil { t.Fatalf("Could not copy certificate and key into container: %v", err) } for index, cmd := range [][]string{hostPrimeCmd, wgetCmd} { t.Logf("Running client connection command: %v", cmd) - stdout, stderr, retcode, err := cwRunner.RunCmdWithOutput(ctx, result.Container.ID, cmd) + stdout, stderr, retcode, err := cwRunner.RunCmdWithOutput(ctx, ctr.ID, cmd) if err != nil { t.Fatalf("Could not run command (%v) in container: %v", cmd, err) } diff --git a/builtin/logical/pkiext/pkiext_binary/acme_test.go b/builtin/logical/pkiext/pkiext_binary/acme_test.go deleted file mode 100644 index 5cdd146964cfb3..00000000000000 --- a/builtin/logical/pkiext/pkiext_binary/acme_test.go +++ /dev/null @@ -1,937 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pkiext_binary - -import ( - "context" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "crypto/rsa" - "crypto/tls" - "crypto/x509" - "crypto/x509/pkix" - "encoding/hex" - "errors" - "fmt" - "net" - "net/http" - "path" - "testing" - "time" - - "golang.org/x/crypto/acme" - - "github.com/hashicorp/vault/builtin/logical/pkiext" - "github.com/hashicorp/vault/helper/testhelpers" - "github.com/hashicorp/vault/sdk/helper/certutil" - hDocker "github.com/hashicorp/vault/sdk/helper/docker" - "github.com/stretchr/testify/require" -) - -// Test_ACME will start a Vault cluster using the docker based binary, and execute -// a bunch of sub-tests against that cluster. 
It is up to each sub-test to run/configure -// a new pki mount within the cluster to not interfere with each other. -func Test_ACME(t *testing.T) { - cluster := NewVaultPkiClusterWithDNS(t) - defer cluster.Cleanup() - - tc := map[string]func(t *testing.T, cluster *VaultPkiCluster){ - "certbot": SubtestACMECertbot, - "certbot eab": SubtestACMECertbotEab, - "acme ip sans": SubtestACMEIPAndDNS, - "acme wildcard": SubtestACMEWildcardDNS, - "acme prevents ica": SubtestACMEPreventsICADNS, - } - - // Wrap the tests within an outer group, so that we run all tests - // in parallel, but still wait for all tests to finish before completing - // and running the cleanup of the Vault cluster. - t.Run("group", func(gt *testing.T) { - for testName := range tc { - // Trap the function to be embedded later in the run so it - // doesn't get clobbered on the next for iteration - testFunc := tc[testName] - - gt.Run(testName, func(st *testing.T) { - st.Parallel() - testFunc(st, cluster) - }) - } - }) - - // Do not run these tests in parallel. - t.Run("step down", func(gt *testing.T) { SubtestACMEStepDownNode(gt, cluster) }) -} - -func SubtestACMECertbot(t *testing.T, cluster *VaultPkiCluster) { - pki, err := cluster.CreateAcmeMount("pki") - require.NoError(t, err, "failed setting up acme mount") - - directory := "https://" + pki.GetActiveContainerIP() + ":8200/v1/pki/acme/directory" - vaultNetwork := pki.GetContainerNetworkName() - - logConsumer, logStdout, logStderr := getDockerLog(t) - - // Default to 45 second timeout, but bump to 120 when running locally or if nightly regression - // flag is provided. - sleepTimer := "45" - if testhelpers.IsLocalOrRegressionTests() { - sleepTimer = "120" - } - - t.Logf("creating on network: %v", vaultNetwork) - runner, err := hDocker.NewServiceRunner(hDocker.RunOptions{ - ImageRepo: "docker.mirror.hashicorp.services/certbot/certbot", - ImageTag: "latest", - ContainerName: "vault_pki_certbot_test", - NetworkName: vaultNetwork, - Entrypoint: []string{"sleep", sleepTimer}, - LogConsumer: logConsumer, - LogStdout: logStdout, - LogStderr: logStderr, - }) - require.NoError(t, err, "failed creating service runner") - - ctx := context.Background() - result, err := runner.Start(ctx, true, false) - require.NoError(t, err, "could not start container") - require.NotNil(t, result, "could not start container") - - defer runner.Stop(context.Background(), result.Container.ID) - - networks, err := runner.GetNetworkAndAddresses(result.Container.ID) - require.NoError(t, err, "could not read container's IP address") - require.Contains(t, networks, vaultNetwork, "expected to contain vault network") - - ipAddr := networks[vaultNetwork] - hostname := "certbot-acme-client.dadgarcorp.com" - - err = pki.AddHostname(hostname, ipAddr) - require.NoError(t, err, "failed to update vault host files") - - // Sinkhole a domain that's invalid just in case it's registered in the future. 
- cluster.Dns.AddDomain("armoncorp.com") - cluster.Dns.AddRecord("armoncorp.com", "A", "127.0.0.1") - - certbotCmd := []string{ - "certbot", - "certonly", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--agree-tos", - "--no-verify-ssl", - "--standalone", - "--non-interactive", - "--server", directory, - "-d", hostname, - } - logCatCmd := []string{"cat", "/var/log/letsencrypt/letsencrypt.log"} - - stdout, stderr, retcode, err := runner.RunCmdWithOutput(ctx, result.Container.ID, certbotCmd) - t.Logf("Certbot Issue Command: %v\nstdout: %v\nstderr: %v\n", certbotCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running issue command") - require.Equal(t, 0, retcode, "expected zero retcode issue command result") - - // N.B. We're using the `certonly` subcommand here because it seems as though the `renew` command - // attempts to install the cert for you. This ends up hanging and getting killed by docker, but is - // also not desired behavior. The certbot docs suggest using `certonly` to renew as seen here: - // https://eff-certbot.readthedocs.io/en/stable/using.html#renewing-certificates - certbotRenewCmd := []string{ - "certbot", - "certonly", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--agree-tos", - "--no-verify-ssl", - "--standalone", - "--non-interactive", - "--server", directory, - "-d", hostname, - "--cert-name", hostname, - "--force-renewal", - } - - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotRenewCmd) - t.Logf("Certbot Renew Command: %v\nstdout: %v\nstderr: %v\n", certbotRenewCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running renew command") - require.Equal(t, 0, retcode, "expected zero retcode renew command result") - - certbotRevokeCmd := []string{ - "certbot", - "revoke", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--agree-tos", - "--no-verify-ssl", - "--non-interactive", - "--no-delete-after-revoke", - "--cert-name", hostname, - } - - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotRevokeCmd) - t.Logf("Certbot Revoke Command: %v\nstdout: %v\nstderr: %v\n", certbotRevokeCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running revoke command") - require.Equal(t, 0, retcode, "expected zero retcode revoke command result") - - // Revoking twice should fail. 
- stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotRevokeCmd) - t.Logf("Certbot Double Revoke Command: %v\nstdout: %v\nstderr: %v\n", certbotRevokeCmd, string(stdout), string(stderr)) - if err != nil || retcode == 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - - require.NoError(t, err, "got error running double revoke command") - require.NotEqual(t, 0, retcode, "expected non-zero retcode double revoke command result") - - // Attempt to issue against a domain that doesn't match the challenge. - // N.B. This test only runs locally or when the nightly regression env var is provided to CI. - if testhelpers.IsLocalOrRegressionTests() { - certbotInvalidIssueCmd := []string{ - "certbot", - "certonly", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--agree-tos", - "--no-verify-ssl", - "--standalone", - "--non-interactive", - "--server", directory, - "-d", "armoncorp.com", - "--issuance-timeout", "10", - } - - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotInvalidIssueCmd) - t.Logf("Certbot Invalid Issue Command: %v\nstdout: %v\nstderr: %v\n", certbotInvalidIssueCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running issue command") - require.NotEqual(t, 0, retcode, "expected non-zero retcode issue command result") - } - - // Attempt to close out our ACME account - certbotUnregisterCmd := []string{ - "certbot", - "unregister", - "--no-verify-ssl", - "--non-interactive", - "--server", directory, - } - - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotUnregisterCmd) - t.Logf("Certbot Unregister Command: %v\nstdout: %v\nstderr: %v\n", certbotUnregisterCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running unregister command") - require.Equal(t, 0, retcode, "expected zero retcode unregister command result") - - // Attempting to close out our ACME account twice should fail - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotUnregisterCmd) - t.Logf("Certbot double Unregister Command: %v\nstdout: %v\nstderr: %v\n", certbotUnregisterCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot double logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running double unregister command") - require.Equal(t, 1, retcode, "expected non-zero retcode double unregister command result") -} - -func SubtestACMECertbotEab(t *testing.T, cluster *VaultPkiCluster) { - mountName := "pki-certbot-eab" - pki, err := cluster.CreateAcmeMount(mountName) - require.NoError(t, err, "failed setting up acme mount") - - err = pki.UpdateAcmeConfig(true, map[string]interface{}{ - "eab_policy": "new-account-required", - }) - 
require.NoError(t, err) - - eabId, base64EabKey, err := pki.GetEabKey("acme/") - - directory := "https://" + pki.GetActiveContainerIP() + ":8200/v1/" + mountName + "/acme/directory" - vaultNetwork := pki.GetContainerNetworkName() - - logConsumer, logStdout, logStderr := getDockerLog(t) - - t.Logf("creating on network: %v", vaultNetwork) - runner, err := hDocker.NewServiceRunner(hDocker.RunOptions{ - ImageRepo: "docker.mirror.hashicorp.services/certbot/certbot", - ImageTag: "latest", - ContainerName: "vault_pki_certbot_eab_test", - NetworkName: vaultNetwork, - Entrypoint: []string{"sleep", "45"}, - LogConsumer: logConsumer, - LogStdout: logStdout, - LogStderr: logStderr, - }) - require.NoError(t, err, "failed creating service runner") - - ctx := context.Background() - result, err := runner.Start(ctx, true, false) - require.NoError(t, err, "could not start container") - require.NotNil(t, result, "could not start container") - - defer runner.Stop(context.Background(), result.Container.ID) - - networks, err := runner.GetNetworkAndAddresses(result.Container.ID) - require.NoError(t, err, "could not read container's IP address") - require.Contains(t, networks, vaultNetwork, "expected to contain vault network") - - ipAddr := networks[vaultNetwork] - hostname := "certbot-eab-acme-client.dadgarcorp.com" - - err = pki.AddHostname(hostname, ipAddr) - require.NoError(t, err, "failed to update vault host files") - - certbotCmd := []string{ - "certbot", - "certonly", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--eab-kid", eabId, - "--eab-hmac-key='" + base64EabKey + "'", - "--agree-tos", - "--no-verify-ssl", - "--standalone", - "--non-interactive", - "--server", directory, - "-d", hostname, - } - logCatCmd := []string{"cat", "/var/log/letsencrypt/letsencrypt.log"} - - stdout, stderr, retcode, err := runner.RunCmdWithOutput(ctx, result.Container.ID, certbotCmd) - t.Logf("Certbot Issue Command: %v\nstdout: %v\nstderr: %v\n", certbotCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running issue command") - require.Equal(t, 0, retcode, "expected zero retcode issue command result") - - certbotRenewCmd := []string{ - "certbot", - "certonly", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--agree-tos", - "--no-verify-ssl", - "--standalone", - "--non-interactive", - "--server", directory, - "-d", hostname, - "--cert-name", hostname, - "--force-renewal", - } - - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotRenewCmd) - t.Logf("Certbot Renew Command: %v\nstdout: %v\nstderr: %v\n", certbotRenewCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running renew command") - require.Equal(t, 0, retcode, "expected zero retcode renew command result") - - certbotRevokeCmd := []string{ - "certbot", - "revoke", - "--no-eff-email", - "--email", "certbot.client@dadgarcorp.com", - "--agree-tos", - "--no-verify-ssl", - "--non-interactive", - "--no-delete-after-revoke", - "--cert-name", hostname, - } - - stdout, stderr, retcode, err = 
runner.RunCmdWithOutput(ctx, result.Container.ID, certbotRevokeCmd) - t.Logf("Certbot Revoke Command: %v\nstdout: %v\nstderr: %v\n", certbotRevokeCmd, string(stdout), string(stderr)) - if err != nil || retcode != 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - require.NoError(t, err, "got error running revoke command") - require.Equal(t, 0, retcode, "expected zero retcode revoke command result") - - // Revoking twice should fail. - stdout, stderr, retcode, err = runner.RunCmdWithOutput(ctx, result.Container.ID, certbotRevokeCmd) - t.Logf("Certbot Double Revoke Command: %v\nstdout: %v\nstderr: %v\n", certbotRevokeCmd, string(stdout), string(stderr)) - if err != nil || retcode == 0 { - logsStdout, logsStderr, _, _ := runner.RunCmdWithOutput(ctx, result.Container.ID, logCatCmd) - t.Logf("Certbot logs\nstdout: %v\nstderr: %v\n", string(logsStdout), string(logsStderr)) - } - - require.NoError(t, err, "got error running double revoke command") - require.NotEqual(t, 0, retcode, "expected non-zero retcode double revoke command result") -} - -func SubtestACMEIPAndDNS(t *testing.T, cluster *VaultPkiCluster) { - pki, err := cluster.CreateAcmeMount("pki-ip-dns-sans") - require.NoError(t, err, "failed setting up acme mount") - - // Since we interact with ACME from outside the container network the ACME - // configuration needs to be updated to use the host port and not the internal - // docker ip. - basePath, err := pki.UpdateClusterConfigLocalAddr() - require.NoError(t, err, "failed updating cluster config") - - logConsumer, logStdout, logStderr := getDockerLog(t) - - // Setup an nginx container that we can have respond the queries for ips - runner, err := hDocker.NewServiceRunner(hDocker.RunOptions{ - ImageRepo: "docker.mirror.hashicorp.services/nginx", - ImageTag: "latest", - ContainerName: "vault_pki_ipsans_test", - NetworkName: pki.GetContainerNetworkName(), - LogConsumer: logConsumer, - LogStdout: logStdout, - LogStderr: logStderr, - }) - require.NoError(t, err, "failed creating service runner") - - ctx := context.Background() - result, err := runner.Start(ctx, true, false) - require.NoError(t, err, "could not start container") - require.NotNil(t, result, "could not start container") - - nginxContainerId := result.Container.ID - defer runner.Stop(context.Background(), nginxContainerId) - networks, err := runner.GetNetworkAndAddresses(nginxContainerId) - - challengeFolder := "/usr/share/nginx/html/.well-known/acme-challenge/" - createChallengeFolderCmd := []string{ - "sh", "-c", - "mkdir -p '" + challengeFolder + "'", - } - stdout, stderr, retcode, err := runner.RunCmdWithOutput(ctx, nginxContainerId, createChallengeFolderCmd) - require.NoError(t, err, "failed to create folder in nginx container") - t.Logf("Update host file command: %v\nstdout: %v\nstderr: %v", createChallengeFolderCmd, string(stdout), string(stderr)) - require.Equal(t, 0, retcode, "expected zero retcode from mkdir in nginx container") - - ipAddr := networks[pki.GetContainerNetworkName()] - hostname := "go-lang-acme-client.dadgarcorp.com" - - err = pki.AddHostname(hostname, ipAddr) - require.NoError(t, err, "failed to update vault host files") - - // Perform an ACME lifecycle with an order that contains both an IP and a DNS name identifier - err = pki.UpdateRole("ip-dns-sans", map[string]interface{}{ - "key_type": "any", - "allowed_domains": "dadgarcorp.com", - "allow_subdomains": 
true, - "allow_wildcard_certificates": false, - }) - require.NoError(t, err, "failed creating role ip-dns-sans") - - directoryUrl := basePath + "/roles/ip-dns-sans/acme/directory" - acmeOrderIdentifiers := []acme.AuthzID{ - {Type: "ip", Value: ipAddr}, - {Type: "dns", Value: hostname}, - } - cr := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: hostname}, - DNSNames: []string{hostname}, - IPAddresses: []net.IP{net.ParseIP(ipAddr)}, - } - - provisioningFunc := func(acmeClient *acme.Client, auths []*acme.Authorization) []*acme.Challenge { - // For each http-01 challenge, generate the file to place underneath the nginx challenge folder - acmeCtx := hDocker.NewBuildContext() - var challengesToAccept []*acme.Challenge - for _, auth := range auths { - for _, challenge := range auth.Challenges { - if challenge.Status != acme.StatusPending { - t.Logf("ignoring challenge not in status pending: %v", challenge) - continue - } - - if challenge.Type == "http-01" { - challengeBody, err := acmeClient.HTTP01ChallengeResponse(challenge.Token) - require.NoError(t, err, "failed generating challenge response") - - challengePath := acmeClient.HTTP01ChallengePath(challenge.Token) - require.NoError(t, err, "failed generating challenge path") - - challengeFile := path.Base(challengePath) - - acmeCtx[challengeFile] = hDocker.PathContentsFromString(challengeBody) - - challengesToAccept = append(challengesToAccept, challenge) - } - } - } - - require.GreaterOrEqual(t, len(challengesToAccept), 1, "Need at least one challenge, got none") - - // Copy all challenges within the nginx container - err = runner.CopyTo(nginxContainerId, challengeFolder, acmeCtx) - require.NoError(t, err, "failed copying challenges to container") - - return challengesToAccept - } - - acmeCert := doAcmeValidationWithGoLibrary(t, directoryUrl, acmeOrderIdentifiers, cr, provisioningFunc, "") - - require.Len(t, acmeCert.IPAddresses, 1, "expected only a single ip address in cert") - require.Equal(t, ipAddr, acmeCert.IPAddresses[0].String()) - require.Equal(t, []string{hostname}, acmeCert.DNSNames) - require.Equal(t, hostname, acmeCert.Subject.CommonName) - - // Perform an ACME lifecycle with an order that contains just an IP identifier - err = pki.UpdateRole("ip-sans", map[string]interface{}{ - "key_type": "any", - "use_csr_common_name": false, - "require_cn": false, - "client_flag": false, - }) - require.NoError(t, err, "failed creating role ip-sans") - - directoryUrl = basePath + "/roles/ip-sans/acme/directory" - acmeOrderIdentifiers = []acme.AuthzID{ - {Type: "ip", Value: ipAddr}, - } - cr = &x509.CertificateRequest{ - IPAddresses: []net.IP{net.ParseIP(ipAddr)}, - } - - acmeCert = doAcmeValidationWithGoLibrary(t, directoryUrl, acmeOrderIdentifiers, cr, provisioningFunc, "") - - require.Len(t, acmeCert.IPAddresses, 1, "expected only a single ip address in cert") - require.Equal(t, ipAddr, acmeCert.IPAddresses[0].String()) - require.Empty(t, acmeCert.DNSNames, "acme cert dns name field should have been empty") - require.Equal(t, "", acmeCert.Subject.CommonName) -} - -type acmeGoValidatorProvisionerFunc func(acmeClient *acme.Client, auths []*acme.Authorization) []*acme.Challenge - -func doAcmeValidationWithGoLibrary(t *testing.T, directoryUrl string, acmeOrderIdentifiers []acme.AuthzID, cr *x509.CertificateRequest, provisioningFunc acmeGoValidatorProvisionerFunc, expectedFailure string) *x509.Certificate { - // Since we are contacting Vault through the host ip/port, the certificate will not validate properly - tr := &http.Transport{ - 
TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - } - httpClient := &http.Client{Transport: tr} - - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa account key") - - t.Logf("Using the following url for the ACME directory: %s", directoryUrl) - acmeClient := &acme.Client{ - Key: accountKey, - HTTPClient: httpClient, - DirectoryURL: directoryUrl, - } - - testCtx, cancelFunc := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancelFunc() - - // Create new account - _, err = acmeClient.Register(testCtx, &acme.Account{Contact: []string{"mailto:ipsans@dadgarcorp.com"}}, - func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create an ACME order - order, err := acmeClient.AuthorizeOrder(testCtx, acmeOrderIdentifiers) - require.NoError(t, err, "failed creating ACME order") - - var auths []*acme.Authorization - for _, authUrl := range order.AuthzURLs { - authorization, err := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, err, "failed to lookup authorization at url: %s", authUrl) - auths = append(auths, authorization) - } - - // Handle the validation using the external validation mechanism. - challengesToAccept := provisioningFunc(acmeClient, auths) - require.NotEmpty(t, challengesToAccept, "provisioning function failed to return any challenges to accept") - - // Tell the ACME server, that they can now validate those challenges. - for _, challenge := range challengesToAccept { - _, err = acmeClient.Accept(testCtx, challenge) - require.NoError(t, err, "failed to accept challenge: %v", challenge) - } - - // Wait for the order/challenges to be validated. - _, err = acmeClient.WaitOrder(testCtx, order.URI) - require.NoError(t, err, "failed waiting for order to be ready") - - // Create/sign the CSR and ask ACME server to sign it returning us the final certificate - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - csr, err := x509.CreateCertificateRequest(rand.Reader, cr, csrKey) - require.NoError(t, err, "failed generating csr") - - t.Logf("[TEST-LOG] Created CSR: %v", hex.EncodeToString(csr)) - - certs, _, err := acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, false) - if err != nil { - if expectedFailure != "" { - require.Contains(t, err.Error(), expectedFailure, "got a unexpected failure not matching expected value") - return nil - } - - require.NoError(t, err, "failed to get a certificate back from ACME") - } else if expectedFailure != "" { - t.Fatalf("expected failure containing: %s got none", expectedFailure) - } - - acmeCert, err := x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert bytes") - - return acmeCert -} - -func SubtestACMEWildcardDNS(t *testing.T, cluster *VaultPkiCluster) { - pki, err := cluster.CreateAcmeMount("pki-dns-wildcards") - require.NoError(t, err, "failed setting up acme mount") - - // Since we interact with ACME from outside the container network the ACME - // configuration needs to be updated to use the host port and not the internal - // docker ip. - basePath, err := pki.UpdateClusterConfigLocalAddr() - require.NoError(t, err, "failed updating cluster config") - - hostname := "go-lang-wildcard-client.dadgarcorp.com" - wildcard := "*." + hostname - - // Do validation without a role first. 
- directoryUrl := basePath + "/acme/directory" - acmeOrderIdentifiers := []acme.AuthzID{ - {Type: "dns", Value: hostname}, - {Type: "dns", Value: wildcard}, - } - cr := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: wildcard}, - DNSNames: []string{hostname, wildcard}, - } - - provisioningFunc := func(acmeClient *acme.Client, auths []*acme.Authorization) []*acme.Challenge { - // For each dns-01 challenge, place the record in the associated DNS resolver. - var challengesToAccept []*acme.Challenge - for _, auth := range auths { - for _, challenge := range auth.Challenges { - if challenge.Status != acme.StatusPending { - t.Logf("ignoring challenge not in status pending: %v", challenge) - continue - } - - if challenge.Type == "dns-01" { - challengeBody, err := acmeClient.DNS01ChallengeRecord(challenge.Token) - require.NoError(t, err, "failed generating challenge response") - - err = pki.AddDNSRecord("_acme-challenge."+auth.Identifier.Value, "TXT", challengeBody) - require.NoError(t, err, "failed setting DNS record") - - challengesToAccept = append(challengesToAccept, challenge) - } - } - } - - require.GreaterOrEqual(t, len(challengesToAccept), 1, "Need at least one challenge, got none") - return challengesToAccept - } - - acmeCert := doAcmeValidationWithGoLibrary(t, directoryUrl, acmeOrderIdentifiers, cr, provisioningFunc, "") - require.Contains(t, acmeCert.DNSNames, hostname) - require.Contains(t, acmeCert.DNSNames, wildcard) - require.Equal(t, wildcard, acmeCert.Subject.CommonName) - pki.RemoveDNSRecordsForDomain(hostname) - - // Redo validation with a role this time. - err = pki.UpdateRole("wildcard", map[string]interface{}{ - "key_type": "any", - "allowed_domains": "go-lang-wildcard-client.dadgarcorp.com", - "allow_subdomains": true, - "allow_bare_domains": true, - "allow_wildcard_certificates": true, - "client_flag": false, - }) - require.NoError(t, err, "failed creating role wildcard") - directoryUrl = basePath + "/roles/wildcard/acme/directory" - - acmeCert = doAcmeValidationWithGoLibrary(t, directoryUrl, acmeOrderIdentifiers, cr, provisioningFunc, "") - require.Contains(t, acmeCert.DNSNames, hostname) - require.Contains(t, acmeCert.DNSNames, wildcard) - require.Equal(t, wildcard, acmeCert.Subject.CommonName) - pki.RemoveDNSRecordsForDomain(hostname) -} - -func SubtestACMEPreventsICADNS(t *testing.T, cluster *VaultPkiCluster) { - pki, err := cluster.CreateAcmeMount("pki-dns-ica") - require.NoError(t, err, "failed setting up acme mount") - - // Since we interact with ACME from outside the container network the ACME - // configuration needs to be updated to use the host port and not the internal - // docker ip. - basePath, err := pki.UpdateClusterConfigLocalAddr() - require.NoError(t, err, "failed updating cluster config") - - hostname := "go-lang-intermediate-ca-cert.dadgarcorp.com" - - // Do validation without a role first. - directoryUrl := basePath + "/acme/directory" - acmeOrderIdentifiers := []acme.AuthzID{ - {Type: "dns", Value: hostname}, - } - cr := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: hostname}, - DNSNames: []string{hostname}, - ExtraExtensions: []pkix.Extension{ - // Basic Constraint with IsCA asserted to true. - { - Id: certutil.ExtensionBasicConstraintsOID, - Critical: true, - Value: []byte{0x30, 0x03, 0x01, 0x01, 0xFF}, - }, - }, - } - - provisioningFunc := func(acmeClient *acme.Client, auths []*acme.Authorization) []*acme.Challenge { - // For each dns-01 challenge, place the record in the associated DNS resolver. 
- var challengesToAccept []*acme.Challenge - for _, auth := range auths { - for _, challenge := range auth.Challenges { - if challenge.Status != acme.StatusPending { - t.Logf("ignoring challenge not in status pending: %v", challenge) - continue - } - - if challenge.Type == "dns-01" { - challengeBody, err := acmeClient.DNS01ChallengeRecord(challenge.Token) - require.NoError(t, err, "failed generating challenge response") - - err = pki.AddDNSRecord("_acme-challenge."+auth.Identifier.Value, "TXT", challengeBody) - require.NoError(t, err, "failed setting DNS record") - - challengesToAccept = append(challengesToAccept, challenge) - } - } - } - - require.GreaterOrEqual(t, len(challengesToAccept), 1, "Need at least one challenge, got none") - return challengesToAccept - } - - doAcmeValidationWithGoLibrary(t, directoryUrl, acmeOrderIdentifiers, cr, provisioningFunc, "refusing to accept CSR with Basic Constraints extension") - pki.RemoveDNSRecordsForDomain(hostname) - - // Redo validation with a role this time. - err = pki.UpdateRole("ica", map[string]interface{}{ - "key_type": "any", - "allowed_domains": "go-lang-intermediate-ca-cert.dadgarcorp.com", - "allow_subdomains": true, - "allow_bare_domains": true, - "allow_wildcard_certificates": true, - "client_flag": false, - }) - require.NoError(t, err, "failed creating role wildcard") - directoryUrl = basePath + "/roles/ica/acme/directory" - - doAcmeValidationWithGoLibrary(t, directoryUrl, acmeOrderIdentifiers, cr, provisioningFunc, "refusing to accept CSR with Basic Constraints extension") - pki.RemoveDNSRecordsForDomain(hostname) -} - -// SubtestACMEStepDownNode Verify that we can properly run an ACME session through a -// secondary node, and midway through the challenge verification process, seal the -// active node and make sure we can complete the ACME session on the new active node. -func SubtestACMEStepDownNode(t *testing.T, cluster *VaultPkiCluster) { - pki, err := cluster.CreateAcmeMount("stepdown-test") - require.NoError(t, err) - - // Since we interact with ACME from outside the container network the ACME - // configuration needs to be updated to use the host port and not the internal - // docker ip. We also grab the non-active node here on purpose to verify - // ACME related APIs are properly forwarded across standby hosts. 
- nonActiveNodes := pki.GetNonActiveNodes() - require.GreaterOrEqual(t, len(nonActiveNodes), 1, "Need at least one non-active node") - - nonActiveNode := nonActiveNodes[0] - - basePath := fmt.Sprintf("https://%s/v1/%s", nonActiveNode.HostPort, pki.mount) - err = pki.UpdateClusterConfig(map[string]interface{}{ - "path": basePath, - }) - - hostname := "go-lang-stepdown-client.dadgarcorp.com" - - acmeOrderIdentifiers := []acme.AuthzID{ - {Type: "dns", Value: hostname}, - } - cr := &x509.CertificateRequest{ - DNSNames: []string{hostname, hostname}, - } - - accountKey, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err, "failed creating rsa account key") - - acmeClient := &acme.Client{ - Key: accountKey, - HTTPClient: &http.Client{Transport: &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - }}, - DirectoryURL: basePath + "/acme/directory", - } - - testCtx, cancelFunc := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancelFunc() - - // Create new account - _, err = acmeClient.Register(testCtx, &acme.Account{Contact: []string{"mailto:ipsans@dadgarcorp.com"}}, - func(tosURL string) bool { return true }) - require.NoError(t, err, "failed registering account") - - // Create an ACME order - order, err := acmeClient.AuthorizeOrder(testCtx, acmeOrderIdentifiers) - require.NoError(t, err, "failed creating ACME order") - - require.Len(t, order.AuthzURLs, 1, "expected a single authz url") - authUrl := order.AuthzURLs[0] - - authorization, err := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, err, "failed to lookup authorization at url: %s", authUrl) - - dnsTxtRecordsToAdd := map[string]string{} - - var challengesToAccept []*acme.Challenge - for _, challenge := range authorization.Challenges { - if challenge.Status != acme.StatusPending { - t.Logf("ignoring challenge not in status pending: %v", challenge) - continue - } - - if challenge.Type == "dns-01" { - challengeBody, err := acmeClient.DNS01ChallengeRecord(challenge.Token) - require.NoError(t, err, "failed generating challenge response") - - // Collect the challenges for us to add the DNS records after step-down - dnsTxtRecordsToAdd["_acme-challenge."+authorization.Identifier.Value] = challengeBody - challengesToAccept = append(challengesToAccept, challenge) - } - } - - // Tell the ACME server, that they can now validate those challenges, this will cause challenge - // verification failures on the main node as the DNS records do not exist. - for _, challenge := range challengesToAccept { - _, err = acmeClient.Accept(testCtx, challenge) - require.NoError(t, err, "failed to accept challenge: %v", challenge) - } - - // Now wait till we start seeing the challenge engine start failing the lookups. 
- testhelpers.RetryUntil(t, 10*time.Second, func() error { - myAuth, err := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, err, "failed to lookup authorization at url: %s", authUrl) - - for _, challenge := range myAuth.Challenges { - if challenge.Error != nil { - // The engine failed on one of the challenges, we are done waiting - return nil - } - } - - return fmt.Errorf("no challenges for auth %v contained any errors", myAuth.Identifier) - }) - - // Seal the active node now and wait for the next node to appear - previousActiveNode := pki.GetActiveClusterNode() - t.Logf("Stepping down node id: %s", previousActiveNode.NodeID) - - haStatus, _ := previousActiveNode.APIClient().Sys().HAStatus() - t.Logf("Node: %v HaStatus: %v\n", previousActiveNode.NodeID, haStatus) - - testhelpers.RetryUntil(t, 2*time.Minute, func() error { - state, err := previousActiveNode.APIClient().Sys().RaftAutopilotState() - if err != nil { - return err - } - - t.Logf("Node: %v Raft AutoPilotState: %v\n", previousActiveNode.NodeID, state) - - if !state.Healthy { - return fmt.Errorf("raft auto pilot state is not healthy") - } - - // Make sure that we have at least one node that can take over prior to sealing the current active node. - if state.FailureTolerance < 1 { - msg := fmt.Sprintf("there is no fault tolerance within raft state yet: %d", state.FailureTolerance) - t.Log(msg) - return errors.New(msg) - } - - return nil - }) - - t.Logf("Sealing active node") - err = previousActiveNode.APIClient().Sys().Seal() - require.NoError(t, err, "failed stepping down node") - - // Add our DNS records now - t.Logf("Adding DNS records") - for dnsHost, dnsValue := range dnsTxtRecordsToAdd { - err = pki.AddDNSRecord(dnsHost, "TXT", dnsValue) - require.NoError(t, err, "failed adding DNS record: %s:%s", dnsHost, dnsValue) - } - - // Wait for our new active node to come up - testhelpers.RetryUntil(t, 2*time.Minute, func() error { - newNode := pki.GetActiveClusterNode() - if newNode.NodeID == previousActiveNode.NodeID { - return fmt.Errorf("existing node is still the leader after stepdown: %s", newNode.NodeID) - } - - t.Logf("New active node has node id: %v", newNode.NodeID) - return nil - }) - - // Wait for the order/challenges to be validated. 
- _, err = acmeClient.WaitOrder(testCtx, order.URI) - if err != nil { - // We failed waiting for the order to become ready, lets print out current challenge statuses to help debugging - myAuth, authErr := acmeClient.GetAuthorization(testCtx, authUrl) - require.NoError(t, authErr, "failed to lookup authorization at url: %s and wait order failed with: %v", authUrl, err) - - t.Logf("Authorization Status: %s", myAuth.Status) - for _, challenge := range myAuth.Challenges { - // The engine failed on one of the challenges, we are done waiting - t.Logf("challenge: %v state: %v Error: %v", challenge.Type, challenge.Status, challenge.Error) - } - - require.NoError(t, err, "failed waiting for order to be ready") - } - - // Create/sign the CSR and ask ACME server to sign it returning us the final certificate - csrKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - csr, err := x509.CreateCertificateRequest(rand.Reader, cr, csrKey) - require.NoError(t, err, "failed generating csr") - - certs, _, err := acmeClient.CreateOrderCert(testCtx, order.FinalizeURL, csr, false) - require.NoError(t, err, "failed to get a certificate back from ACME") - - _, err = x509.ParseCertificate(certs[0]) - require.NoError(t, err, "failed parsing acme cert bytes") -} - -func getDockerLog(t *testing.T) (func(s string), *pkiext.LogConsumerWriter, *pkiext.LogConsumerWriter) { - logConsumer := func(s string) { - t.Logf(s) - } - - logStdout := &pkiext.LogConsumerWriter{logConsumer} - logStderr := &pkiext.LogConsumerWriter{logConsumer} - return logConsumer, logStdout, logStderr -} diff --git a/builtin/logical/pkiext/pkiext_binary/pki_cluster.go b/builtin/logical/pkiext/pkiext_binary/pki_cluster.go deleted file mode 100644 index da33300934baae..00000000000000 --- a/builtin/logical/pkiext/pkiext_binary/pki_cluster.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package pkiext_binary - -import ( - "context" - "fmt" - "os" - "testing" - "time" - - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/builtin/logical/pki/dnstest" - dockhelper "github.com/hashicorp/vault/sdk/helper/docker" - "github.com/hashicorp/vault/sdk/helper/testcluster" - "github.com/hashicorp/vault/sdk/helper/testcluster/docker" -) - -type VaultPkiCluster struct { - cluster *docker.DockerCluster - Dns *dnstest.TestServer -} - -func NewVaultPkiCluster(t *testing.T) *VaultPkiCluster { - binary := os.Getenv("VAULT_BINARY") - if binary == "" { - t.Skip("only running docker test when $VAULT_BINARY present") - } - - opts := &docker.DockerClusterOptions{ - ImageRepo: "docker.mirror.hashicorp.services/hashicorp/vault", - // We're replacing the binary anyway, so we're not too particular about - // the docker image version tag. 
- ImageTag: "latest", - VaultBinary: binary, - ClusterOptions: testcluster.ClusterOptions{ - VaultNodeConfig: &testcluster.VaultNodeConfig{ - LogLevel: "TRACE", - }, - NumCores: 3, - }, - } - - cluster := docker.NewTestDockerCluster(t, opts) - - return &VaultPkiCluster{cluster: cluster} -} - -func NewVaultPkiClusterWithDNS(t *testing.T) *VaultPkiCluster { - cluster := NewVaultPkiCluster(t) - dns := dnstest.SetupResolverOnNetwork(t, "dadgarcorp.com", cluster.GetContainerNetworkName()) - cluster.Dns = dns - return cluster -} - -func (vpc *VaultPkiCluster) Cleanup() { - vpc.cluster.Cleanup() - if vpc.Dns != nil { - vpc.Dns.Cleanup() - } -} - -func (vpc *VaultPkiCluster) GetActiveClusterNode() *docker.DockerClusterNode { - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - node, err := testcluster.WaitForActiveNode(ctx, vpc.cluster) - if err != nil { - panic(fmt.Sprintf("no cluster node became active in timeout window: %v", err)) - } - - return vpc.cluster.ClusterNodes[node] -} - -func (vpc *VaultPkiCluster) GetNonActiveNodes() []*docker.DockerClusterNode { - nodes := []*docker.DockerClusterNode{} - for _, node := range vpc.cluster.ClusterNodes { - leader, err := node.APIClient().Sys().Leader() - if err != nil { - continue - } - - if !leader.IsSelf { - nodes = append(nodes, node) - } - } - - return nodes -} - -func (vpc *VaultPkiCluster) GetActiveContainerHostPort() string { - return vpc.GetActiveClusterNode().HostPort -} - -func (vpc *VaultPkiCluster) GetContainerNetworkName() string { - return vpc.cluster.ClusterNodes[0].ContainerNetworkName -} - -func (vpc *VaultPkiCluster) GetActiveContainerIP() string { - return vpc.GetActiveClusterNode().ContainerIPAddress -} - -func (vpc *VaultPkiCluster) GetActiveContainerID() string { - return vpc.GetActiveClusterNode().Container.ID -} - -func (vpc *VaultPkiCluster) GetActiveNode() *api.Client { - return vpc.GetActiveClusterNode().APIClient() -} - -func (vpc *VaultPkiCluster) AddHostname(hostname, ip string) error { - if vpc.Dns != nil { - vpc.Dns.AddRecord(hostname, "A", ip) - vpc.Dns.PushConfig() - return nil - } else { - return vpc.AddNameToHostFiles(hostname, ip) - } -} - -func (vpc *VaultPkiCluster) AddNameToHostFiles(hostname, ip string) error { - updateHostsCmd := []string{ - "sh", "-c", - "echo '" + ip + " " + hostname + "' >> /etc/hosts", - } - for _, node := range vpc.cluster.ClusterNodes { - containerID := node.Container.ID - _, _, retcode, err := dockhelper.RunCmdWithOutput(vpc.cluster.DockerAPI, context.Background(), containerID, updateHostsCmd) - if err != nil { - return fmt.Errorf("failed updating container %s host file: %w", containerID, err) - } - - if retcode != 0 { - return fmt.Errorf("expected zero retcode from updating vault host file in container %s got: %d", containerID, retcode) - } - } - - return nil -} - -func (vpc *VaultPkiCluster) AddDNSRecord(hostname, recordType, ip string) error { - if vpc.Dns == nil { - return fmt.Errorf("no DNS server was provisioned on this cluster group; unable to provision custom records") - } - - vpc.Dns.AddRecord(hostname, recordType, ip) - vpc.Dns.PushConfig() - return nil -} - -func (vpc *VaultPkiCluster) RemoveDNSRecord(domain string, record string, value string) error { - if vpc.Dns == nil { - return fmt.Errorf("no DNS server was provisioned on this cluster group; unable to remove specific record") - } - - vpc.Dns.RemoveRecord(domain, record, value) - return nil -} - -func (vpc *VaultPkiCluster) RemoveDNSRecordsOfTypeForDomain(domain string, record 
string) error { - if vpc.Dns == nil { - return fmt.Errorf("no DNS server was provisioned on this cluster group; unable to remove all records of type") - } - - vpc.Dns.RemoveRecordsOfTypeForDomain(domain, record) - return nil -} - -func (vpc *VaultPkiCluster) RemoveDNSRecordsForDomain(domain string) error { - if vpc.Dns == nil { - return fmt.Errorf("no DNS server was provisioned on this cluster group; unable to remove records for domain") - } - - vpc.Dns.RemoveRecordsForDomain(domain) - return nil -} - -func (vpc *VaultPkiCluster) RemoveAllDNSRecords() error { - if vpc.Dns == nil { - return fmt.Errorf("no DNS server was provisioned on this cluster group; unable to remove all records") - } - - vpc.Dns.RemoveAllRecords() - return nil -} - -func (vpc *VaultPkiCluster) CreateMount(name string) (*VaultPkiMount, error) { - err := vpc.GetActiveNode().Sys().Mount(name, &api.MountInput{ - Type: "pki", - Config: api.MountConfigInput{ - DefaultLeaseTTL: "16h", - MaxLeaseTTL: "32h", - AllowedResponseHeaders: []string{ - "Last-Modified", "Replay-Nonce", - "Link", "Location", - }, - }, - }) - if err != nil { - return nil, err - } - - return &VaultPkiMount{ - vpc, - name, - }, nil -} - -func (vpc *VaultPkiCluster) CreateAcmeMount(mountName string) (*VaultPkiMount, error) { - pki, err := vpc.CreateMount(mountName) - if err != nil { - return nil, fmt.Errorf("failed creating mount %s: %w", mountName, err) - } - - err = pki.UpdateClusterConfig(nil) - if err != nil { - return nil, fmt.Errorf("failed updating cluster config: %w", err) - } - - cfg := map[string]interface{}{ - "eab_policy": "not-required", - } - if vpc.Dns != nil { - cfg["dns_resolver"] = vpc.Dns.GetRemoteAddr() - } - - err = pki.UpdateAcmeConfig(true, cfg) - if err != nil { - return nil, fmt.Errorf("failed updating acme config: %w", err) - } - - // Setup root+intermediate CA hierarchy within this mount. 
- resp, err := pki.GenerateRootInternal(map[string]interface{}{ - "common_name": "Root X1", - "country": "US", - "organization": "Dadgarcorp", - "ou": "QA", - "key_type": "ec", - "key_bits": 256, - "use_pss": false, - "issuer_name": "root", - }) - if err != nil { - return nil, fmt.Errorf("failed generating root internal: %w", err) - } - if resp == nil || len(resp.Data) == 0 { - return nil, fmt.Errorf("failed generating root internal: nil or empty response but no error") - } - - resp, err = pki.GenerateIntermediateInternal(map[string]interface{}{ - "common_name": "Intermediate I1", - "country": "US", - "organization": "Dadgarcorp", - "ou": "QA", - "key_type": "ec", - "key_bits": 256, - "use_pss": false, - }) - if err != nil { - return nil, fmt.Errorf("failed generating int csr: %w", err) - } - if resp == nil || len(resp.Data) == 0 { - return nil, fmt.Errorf("failed generating int csr: nil or empty response but no error") - } - - resp, err = pki.SignIntermediary("default", resp.Data["csr"], map[string]interface{}{ - "common_name": "Intermediate I1", - "country": "US", - "organization": "Dadgarcorp", - "ou": "QA", - "key_type": "ec", - "csr": resp.Data["csr"], - }) - if err != nil { - return nil, fmt.Errorf("failed signing int csr: %w", err) - } - if resp == nil || len(resp.Data) == 0 { - return nil, fmt.Errorf("failed signing int csr: nil or empty response but no error") - } - intCert := resp.Data["certificate"].(string) - - resp, err = pki.ImportBundle(intCert, nil) - if err != nil { - return nil, fmt.Errorf("failed importing signed cert: %w", err) - } - if resp == nil || len(resp.Data) == 0 { - return nil, fmt.Errorf("failed importing signed cert: nil or empty response but no error") - } - - err = pki.UpdateDefaultIssuer(resp.Data["imported_issuers"].([]interface{})[0].(string), nil) - if err != nil { - return nil, fmt.Errorf("failed to set intermediate as default: %w", err) - } - - err = pki.UpdateIssuer("default", map[string]interface{}{ - "leaf_not_after_behavior": "truncate", - }) - if err != nil { - return nil, fmt.Errorf("failed to update intermediate ttl behavior: %w", err) - } - - err = pki.UpdateIssuer("root", map[string]interface{}{ - "leaf_not_after_behavior": "truncate", - }) - if err != nil { - return nil, fmt.Errorf("failed to update root ttl behavior: %w", err) - } - - return pki, nil -} diff --git a/builtin/logical/pkiext/pkiext_binary/pki_mount.go b/builtin/logical/pkiext/pkiext_binary/pki_mount.go deleted file mode 100644 index f949cb6ae56a68..00000000000000 --- a/builtin/logical/pkiext/pkiext_binary/pki_mount.go +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package pkiext_binary - -import ( - "context" - "encoding/base64" - "fmt" - "path" - - "github.com/hashicorp/vault/api" -) - -type VaultPkiMount struct { - *VaultPkiCluster - mount string -} - -func (vpm *VaultPkiMount) UpdateClusterConfig(config map[string]interface{}) error { - defaultPath := "https://" + vpm.cluster.ClusterNodes[0].ContainerIPAddress + ":8200/v1/" + vpm.mount - defaults := map[string]interface{}{ - "path": defaultPath, - "aia_path": defaultPath, - } - - _, err := vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/config/cluster", mergeWithDefaults(config, defaults)) - return err -} - -func (vpm *VaultPkiMount) UpdateClusterConfigLocalAddr() (string, error) { - basePath := fmt.Sprintf("https://%s/v1/%s", vpm.GetActiveContainerHostPort(), vpm.mount) - return basePath, vpm.UpdateClusterConfig(map[string]interface{}{ - "path": basePath, - }) -} - -func (vpm *VaultPkiMount) UpdateAcmeConfig(enable bool, config map[string]interface{}) error { - defaults := map[string]interface{}{ - "enabled": enable, - } - - _, err := vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/config/acme", mergeWithDefaults(config, defaults)) - return err -} - -func (vpm *VaultPkiMount) GenerateRootInternal(props map[string]interface{}) (*api.Secret, error) { - defaults := map[string]interface{}{ - "common_name": "root-test.com", - "key_type": "ec", - "issuer_name": "root", - } - - return vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/root/generate/internal", mergeWithDefaults(props, defaults)) -} - -func (vpm *VaultPkiMount) GenerateIntermediateInternal(props map[string]interface{}) (*api.Secret, error) { - defaults := map[string]interface{}{ - "common_name": "intermediary-test.com", - "key_type": "ec", - "issuer_name": "intermediary", - } - - return vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/intermediate/generate/internal", mergeWithDefaults(props, defaults)) -} - -func (vpm *VaultPkiMount) SignIntermediary(signingIssuer string, csr interface{}, props map[string]interface{}) (*api.Secret, error) { - defaults := map[string]interface{}{ - "csr": csr, - } - - return vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/issuer/"+signingIssuer+"/sign-intermediate", - mergeWithDefaults(props, defaults)) -} - -func (vpm *VaultPkiMount) ImportBundle(pemBundle interface{}, props map[string]interface{}) (*api.Secret, error) { - defaults := map[string]interface{}{ - "pem_bundle": pemBundle, - } - - return vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/issuers/import/bundle", mergeWithDefaults(props, defaults)) -} - -func (vpm *VaultPkiMount) UpdateDefaultIssuer(issuerId string, props map[string]interface{}) error { - defaults := map[string]interface{}{ - "default": issuerId, - } - - _, err := vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/config/issuers", mergeWithDefaults(props, defaults)) - - return err -} - -func (vpm *VaultPkiMount) UpdateIssuer(issuerRef string, props map[string]interface{}) error { - defaults := map[string]interface{}{} - - _, err := vpm.GetActiveNode().Logical().JSONMergePatch(context.Background(), - vpm.mount+"/issuer/"+issuerRef, mergeWithDefaults(props, defaults)) - - return err -} - -func (vpm *VaultPkiMount) UpdateRole(roleName string, config map[string]interface{}) error { - defaults := 
map[string]interface{}{} - - _, err := vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), - vpm.mount+"/roles/"+roleName, mergeWithDefaults(config, defaults)) - - return err -} - -func (vpm *VaultPkiMount) GetEabKey(acmeDirectory string) (string, string, error) { - eabPath := path.Join(vpm.mount, acmeDirectory, "/new-eab") - resp, err := vpm.GetActiveNode().Logical().WriteWithContext(context.Background(), eabPath, map[string]interface{}{}) - if err != nil { - return "", "", fmt.Errorf("failed fetching eab from %s: %w", eabPath, err) - } - eabId := resp.Data["id"].(string) - base64EabKey := resp.Data["key"].(string) - // just make sure we get something valid back from the server, we still want to pass back the base64 version - // to the caller... - _, err = base64.RawURLEncoding.DecodeString(base64EabKey) - if err != nil { - return "", "", fmt.Errorf("failed decoding key response field: %s: %w", base64EabKey, err) - } - return eabId, base64EabKey, nil -} - -func mergeWithDefaults(config map[string]interface{}, defaults map[string]interface{}) map[string]interface{} { - myConfig := config - if myConfig == nil { - myConfig = map[string]interface{}{} - } - for key, value := range defaults { - if origVal, exists := config[key]; !exists { - myConfig[key] = value - } else { - myConfig[key] = origVal - } - } - - return myConfig -} diff --git a/builtin/logical/pkiext/test_helpers.go b/builtin/logical/pkiext/test_helpers.go index 38bbdfe112b55e..942c37a4a381b6 100644 --- a/builtin/logical/pkiext/test_helpers.go +++ b/builtin/logical/pkiext/test_helpers.go @@ -1,11 +1,6 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pkiext import ( - "bufio" - "bytes" "crypto" "crypto/x509" "encoding/pem" @@ -68,19 +63,3 @@ func parseKey(t *testing.T, pemKey string) crypto.Signer { require.NoError(t, err) return key } - -type LogConsumerWriter struct { - Consumer func(string) -} - -func (l LogConsumerWriter) Write(p []byte) (n int, err error) { - // TODO this assumes that we're never passed partial log lines, which - // seems a safe assumption for now based on how docker looks to implement - // logging, but might change in the future. - scanner := bufio.NewScanner(bytes.NewReader(p)) - scanner.Buffer(make([]byte, 64*1024), bufio.MaxScanTokenSize) - for scanner.Scan() { - l.Consumer(scanner.Text()) - } - return len(p), nil -} diff --git a/builtin/logical/pkiext/zlint_test.go b/builtin/logical/pkiext/zlint_test.go index 2f20152605823a..bf0d1e636b53cb 100644 --- a/builtin/logical/pkiext/zlint_test.go +++ b/builtin/logical/pkiext/zlint_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pkiext import ( @@ -10,7 +7,8 @@ import ( "testing" "github.com/hashicorp/vault/builtin/logical/pki" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" + "github.com/stretchr/testify/require" ) @@ -66,12 +64,11 @@ func RunZLintContainer(t *testing.T, certificate string) []byte { buildZLintContainer(t) }) - ctx := context.Background() // We don't actually care about the address, we just want to start the // container so we can run commands in it. We'd ideally like to skip this // step and only build a new image, but the zlint output would be // intermingled with container build stages, so its not that useful. 
- result, err := zRunner.Start(ctx, true, false) + ctr, _, _, err := zRunner.Start(context.Background(), true, false) if err != nil { t.Fatalf("Could not start golang container for zlint: %s", err) } @@ -79,13 +76,13 @@ func RunZLintContainer(t *testing.T, certificate string) []byte { // Copy the cert into the newly running container. certCtx := docker.NewBuildContext() certCtx["cert.pem"] = docker.PathContentsFromBytes([]byte(certificate)) - if err := zRunner.CopyTo(result.Container.ID, "/go/", certCtx); err != nil { + if err := zRunner.CopyTo(ctr.ID, "/go/", certCtx); err != nil { t.Fatalf("Could not copy certificate into container: %v", err) } // Run the zlint command and save the output. cmd := []string{"/go/bin/zlint", "/go/cert.pem"} - stdout, stderr, retcode, err := zRunner.RunCmdWithOutput(ctx, result.Container.ID, cmd) + stdout, stderr, retcode, err := zRunner.RunCmdWithOutput(context.Background(), ctr.ID, cmd) if err != nil { t.Fatalf("Could not run command in container: %v", err) } @@ -100,7 +97,7 @@ func RunZLintContainer(t *testing.T, certificate string) []byte { } // Clean up after ourselves. - if err := zRunner.Stop(context.Background(), result.Container.ID); err != nil { + if err := zRunner.Stop(context.Background(), ctr.ID); err != nil { t.Fatalf("failed to stop container: %v", err) } diff --git a/builtin/logical/rabbitmq/backend.go b/builtin/logical/rabbitmq/backend.go index 20ad1afd18f1ec..d1f223810a65ad 100644 --- a/builtin/logical/rabbitmq/backend.go +++ b/builtin/logical/rabbitmq/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( @@ -14,8 +11,6 @@ import ( rabbithole "github.com/michaelklishin/rabbit-hole/v2" ) -const operationPrefixRabbitMQ = "rabbit-mq" - // Factory creates and configures the backend func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() diff --git a/builtin/logical/rabbitmq/backend_test.go b/builtin/logical/rabbitmq/backend_test.go index 61b18dac9d1ce6..7df1384fefa481 100644 --- a/builtin/logical/rabbitmq/backend_test.go +++ b/builtin/logical/rabbitmq/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( @@ -11,8 +8,8 @@ import ( "testing" "github.com/hashicorp/go-secure-stdlib/base62" + "github.com/hashicorp/vault/helper/testhelpers/docker" logicaltest "github.com/hashicorp/vault/helper/testhelpers/logical" - "github.com/hashicorp/vault/sdk/helper/docker" "github.com/hashicorp/vault/sdk/helper/jsonutil" "github.com/hashicorp/vault/sdk/logical" rabbithole "github.com/michaelklishin/rabbit-hole/v2" diff --git a/builtin/logical/rabbitmq/cmd/rabbitmq/main.go b/builtin/logical/rabbitmq/cmd/rabbitmq/main.go index 2cb62daca51abe..516f699eaee6c1 100644 --- a/builtin/logical/rabbitmq/cmd/rabbitmq/main.go +++ b/builtin/logical/rabbitmq/cmd/rabbitmq/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: rabbitmq.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/rabbitmq/passwords.go b/builtin/logical/rabbitmq/passwords.go index 8ba08a0afa4b55..01bfd41f0db2b3 100644 --- a/builtin/logical/rabbitmq/passwords.go +++ b/builtin/logical/rabbitmq/passwords.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( diff --git a/builtin/logical/rabbitmq/path_config_connection.go b/builtin/logical/rabbitmq/path_config_connection.go index 897945512338a4..51abe2547aa194 100644 --- a/builtin/logical/rabbitmq/path_config_connection.go +++ b/builtin/logical/rabbitmq/path_config_connection.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( @@ -20,13 +17,6 @@ const ( func pathConfigConnection(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/connection", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRabbitMQ, - OperationVerb: "configure", - OperationSuffix: "connection", - }, - Fields: map[string]*framework.FieldSchema{ "connection_uri": { Type: framework.TypeString, diff --git a/builtin/logical/rabbitmq/path_config_connection_test.go b/builtin/logical/rabbitmq/path_config_connection_test.go index 55e6b2cd042cb9..dddee8f0c9b886 100644 --- a/builtin/logical/rabbitmq/path_config_connection_test.go +++ b/builtin/logical/rabbitmq/path_config_connection_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( diff --git a/builtin/logical/rabbitmq/path_config_lease.go b/builtin/logical/rabbitmq/path_config_lease.go index 9436f3f4924fad..0b6bb572188d50 100644 --- a/builtin/logical/rabbitmq/path_config_lease.go +++ b/builtin/logical/rabbitmq/path_config_lease.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( @@ -15,11 +12,6 @@ import ( func pathConfigLease(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/lease", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRabbitMQ, - }, - Fields: map[string]*framework.FieldSchema{ "ttl": { Type: framework.TypeDurationSecond, @@ -33,21 +25,9 @@ func pathConfigLease(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathLeaseRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "read", - OperationSuffix: "lease-configuration", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathLeaseUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "lease", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathLeaseRead, + logical.UpdateOperation: b.pathLeaseUpdate, }, HelpSynopsis: pathConfigLeaseHelpSyn, diff --git a/builtin/logical/rabbitmq/path_config_lease_test.go b/builtin/logical/rabbitmq/path_config_lease_test.go index 9e565c56f69b13..ec7e7e169c0879 100644 --- a/builtin/logical/rabbitmq/path_config_lease_test.go +++ b/builtin/logical/rabbitmq/path_config_lease_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( diff --git a/builtin/logical/rabbitmq/path_role_create.go b/builtin/logical/rabbitmq/path_role_create.go index fd3f256ffbfc1b..5ad1ff6bdf4171 100644 --- a/builtin/logical/rabbitmq/path_role_create.go +++ b/builtin/logical/rabbitmq/path_role_create.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( @@ -21,13 +18,6 @@ const ( func pathCreds(b *backend) *framework.Path { return &framework.Path{ Pattern: "creds/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRabbitMQ, - OperationVerb: "request", - OperationSuffix: "credentials", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/rabbitmq/path_role_create_test.go b/builtin/logical/rabbitmq/path_role_create_test.go index ecb9746872513a..2c3d5f4b86d666 100644 --- a/builtin/logical/rabbitmq/path_role_create_test.go +++ b/builtin/logical/rabbitmq/path_role_create_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( diff --git a/builtin/logical/rabbitmq/path_roles.go b/builtin/logical/rabbitmq/path_roles.go index 98c2f3d1f487d6..2031c7d99ec593 100644 --- a/builtin/logical/rabbitmq/path_roles.go +++ b/builtin/logical/rabbitmq/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( @@ -16,10 +13,6 @@ import ( func pathListRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRabbitMQ, - OperationSuffix: "roles", - }, Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -31,10 +24,6 @@ func pathListRoles(b *backend) *framework.Path { func pathRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/" + framework.GenericNameRegex("name"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixRabbitMQ, - OperationSuffix: "role", - }, Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/rabbitmq/secret_creds.go b/builtin/logical/rabbitmq/secret_creds.go index 2d0cce30858b6e..b31dfc7188aa51 100644 --- a/builtin/logical/rabbitmq/secret_creds.go +++ b/builtin/logical/rabbitmq/secret_creds.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package rabbitmq import ( diff --git a/builtin/logical/ssh/backend.go b/builtin/logical/ssh/backend.go index 0606b3eae56f4b..454937f16f253a 100644 --- a/builtin/logical/ssh/backend.go +++ b/builtin/logical/ssh/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -13,8 +10,6 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const operationPrefixSSH = "ssh" - type backend struct { *framework.Backend view logical.Storage diff --git a/builtin/logical/ssh/backend_test.go b/builtin/logical/ssh/backend_test.go index 13f9f73624af47..6382d61d210e14 100644 --- a/builtin/logical/ssh/backend_test.go +++ b/builtin/logical/ssh/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -16,16 +13,18 @@ import ( "time" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/builtin/credential/userpass" "github.com/hashicorp/vault/helper/testhelpers/corehelpers" + "github.com/hashicorp/vault/sdk/logical" + "golang.org/x/crypto/ssh" + + "github.com/hashicorp/vault/builtin/credential/userpass" + "github.com/hashicorp/vault/helper/testhelpers/docker" logicaltest "github.com/hashicorp/vault/helper/testhelpers/logical" vaulthttp "github.com/hashicorp/vault/http" - "github.com/hashicorp/vault/sdk/helper/docker" - "github.com/hashicorp/vault/sdk/logical" "github.com/hashicorp/vault/vault" "github.com/mitchellh/mapstructure" + "github.com/stretchr/testify/require" - "golang.org/x/crypto/ssh" ) const ( @@ -132,8 +131,6 @@ SjOQL/GkH1nkRcDS9++aAAAAAmNhAQID dockerImageTagSupportsNoRSA1 = "8.4_p1-r3-ls48" ) -var ctx = context.Background() - func prepareTestContainer(t *testing.T, tag, caPublicKeyPEM string) (func(), string) { if tag == "" { tag = dockerImageTagSupportsNoRSA1 @@ -948,63 +945,12 @@ cKumubUxOfFdy1ZvAAAAEm5jY0BtYnAudWJudC5sb2NhbA== return nil }, }, - testIssueCert("testcarole", "ec", testUserName, sshAddress, expectError), - testIssueCert("testcarole", "ed25519", testUserName, sshAddress, expectError), - testIssueCert("testcarole", "rsa", testUserName, sshAddress, expectError), }, } logicaltest.Test(t, testCase) } -func testIssueCert(role string, keyType string, testUserName string, sshAddress string, expectError bool) logicaltest.TestStep { - return logicaltest.TestStep{ - Operation: logical.UpdateOperation, - Path: "issue/" + role, - ErrorOk: expectError, - Data: map[string]interface{}{ - "key_type": keyType, - "valid_principals": testUserName, - }, - - Check: func(resp *logical.Response) error { - // Tolerate nil response if an error was expected - if expectError && resp == nil { - return nil - } - - signedKey := strings.TrimSpace(resp.Data["signed_key"].(string)) - if signedKey == "" { - return errors.New("no signed key in response") - } - - privKey, err := ssh.ParsePrivateKey([]byte(resp.Data["private_key"].(string))) - if err != nil { - return fmt.Errorf("error parsing private key: %v", err) - } - - parsedKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(signedKey)) - if err != nil { - return fmt.Errorf("error parsing signed key: %v", err) - } - certSigner, err := ssh.NewCertSigner(parsedKey.(*ssh.Certificate), privKey) - if err != nil { - return err - } - - err = testSSH(testUserName, sshAddress, ssh.PublicKeys(certSigner), "date") - if expectError && err == nil { - return fmt.Errorf("expected error but got none") - } - if !expectError && err != nil { - return err - } - - return nil - }, - } -} - func TestSSHBackend_CAUpgradeAlgorithmSigner(t *testing.T) { cleanup, sshAddress := prepareTestContainer(t, dockerImageTagSupportsRSA1, testCAPublicKey) defer cleanup() @@ -2516,326 +2462,3 @@ func TestBackend_CleanupDynamicHostKeys(t *testing.T) { require.NotNil(t, resp.Data["message"]) require.Contains(t, resp.Data["message"], "0 of 0") } - -type pathAuthCheckerFunc func(t *testing.T, client *api.Client, path string, token string) - -func isPermDenied(err error) bool { - return strings.Contains(err.Error(), "permission denied") -} - -func isUnsupportedPathOperation(err error) bool { - return strings.Contains(err.Error(), "unsupported path") || strings.Contains(err.Error(), "unsupported operation") -} - -func isDeniedOp(err error) bool { - return isPermDenied(err) || isUnsupportedPathOperation(err) -} - 
-func pathShouldBeAuthed(t *testing.T, client *api.Client, path string, token string) { - client.SetToken("") - resp, err := client.Logical().ReadWithContext(ctx, path) - if err == nil || !isPermDenied(err) { - t.Fatalf("expected failure to read %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().ListWithContext(ctx, path) - if err == nil || !isPermDenied(err) { - t.Fatalf("expected failure to list %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().WriteWithContext(ctx, path, map[string]interface{}{}) - if err == nil || !isPermDenied(err) { - t.Fatalf("expected failure to write %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().DeleteWithContext(ctx, path) - if err == nil || !isPermDenied(err) { - t.Fatalf("expected failure to delete %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().JSONMergePatch(ctx, path, map[string]interface{}{}) - if err == nil || !isPermDenied(err) { - t.Fatalf("expected failure to patch %v while unauthed: %v / %v", path, err, resp) - } -} - -func pathShouldBeUnauthedReadList(t *testing.T, client *api.Client, path string, token string) { - // Should be able to read both with and without a token. - client.SetToken("") - resp, err := client.Logical().ReadWithContext(ctx, path) - if err != nil && isPermDenied(err) { - // Read will sometimes return permission denied, when the handler - // does not support the given operation. Retry with the token. - client.SetToken(token) - resp2, err2 := client.Logical().ReadWithContext(ctx, path) - if err2 != nil && !isUnsupportedPathOperation(err2) { - t.Fatalf("unexpected failure to read %v while unauthed: %v / %v\nWhile authed: %v / %v", path, err, resp, err2, resp2) - } - client.SetToken("") - } - resp, err = client.Logical().ListWithContext(ctx, path) - if err != nil && isPermDenied(err) { - // List will sometimes return permission denied, when the handler - // does not support the given operation. Retry with the token. - client.SetToken(token) - resp2, err2 := client.Logical().ListWithContext(ctx, path) - if err2 != nil && !isUnsupportedPathOperation(err2) { - t.Fatalf("unexpected failure to list %v while unauthed: %v / %v\nWhile authed: %v / %v", path, err, resp, err2, resp2) - } - client.SetToken("") - } - - // These should all be denied. - resp, err = client.Logical().WriteWithContext(ctx, path, map[string]interface{}{}) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during write on read-only path %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().DeleteWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during delete on read-only path %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().JSONMergePatch(ctx, path, map[string]interface{}{}) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during patch on read-only path %v while unauthed: %v / %v", path, err, resp) - } - - // Retrying with token should allow read/list, but not modification still. - client.SetToken(token) - resp, err = client.Logical().ReadWithContext(ctx, path) - if err != nil && isPermDenied(err) { - t.Fatalf("unexpected failure to read %v while authed: %v / %v", path, err, resp) - } - resp, err = client.Logical().ListWithContext(ctx, path) - if err != nil && isPermDenied(err) { - t.Fatalf("unexpected failure to list %v while authed: %v / %v", path, err, resp) - } - - // Should all be denied. 
- resp, err = client.Logical().WriteWithContext(ctx, path, map[string]interface{}{}) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during write on read-only path %v while authed: %v / %v", path, err, resp) - } - resp, err = client.Logical().DeleteWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during delete on read-only path %v while authed: %v / %v", path, err, resp) - } - resp, err = client.Logical().JSONMergePatch(ctx, path, map[string]interface{}{}) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during patch on read-only path %v while authed: %v / %v", path, err, resp) - } -} - -func pathShouldBeUnauthedWriteOnly(t *testing.T, client *api.Client, path string, token string) { - client.SetToken("") - resp, err := client.Logical().WriteWithContext(ctx, path, map[string]interface{}{}) - if err != nil && isPermDenied(err) { - t.Fatalf("unexpected failure to write %v while unauthed: %v / %v", path, err, resp) - } - - // These should all be denied. - resp, err = client.Logical().ReadWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during read on write-only path %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().ListWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during list on write-only path %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().DeleteWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during delete on write-only path %v while unauthed: %v / %v", path, err, resp) - } - resp, err = client.Logical().JSONMergePatch(ctx, path, map[string]interface{}{}) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during patch on write-only path %v while unauthed: %v / %v", path, err, resp) - } - - // Retrying with token should allow writing, but nothing else. - client.SetToken(token) - resp, err = client.Logical().WriteWithContext(ctx, path, map[string]interface{}{}) - if err != nil && isPermDenied(err) { - t.Fatalf("unexpected failure to write %v while unauthed: %v / %v", path, err, resp) - } - - // These should all be denied. 
- resp, err = client.Logical().ReadWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during read on write-only path %v while authed: %v / %v", path, err, resp) - } - resp, err = client.Logical().ListWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - if resp != nil || err != nil { - t.Fatalf("unexpected failure during list on write-only path %v while authed: %v / %v", path, err, resp) - } - } - resp, err = client.Logical().DeleteWithContext(ctx, path) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during delete on write-only path %v while authed: %v / %v", path, err, resp) - } - resp, err = client.Logical().JSONMergePatch(ctx, path, map[string]interface{}{}) - if err == nil || !isDeniedOp(err) { - t.Fatalf("unexpected failure during patch on write-only path %v while authed: %v / %v", path, err, resp) - } -} - -type pathAuthChecker int - -const ( - shouldBeAuthed pathAuthChecker = iota - shouldBeUnauthedReadList - shouldBeUnauthedWriteOnly -) - -var pathAuthChckerMap = map[pathAuthChecker]pathAuthCheckerFunc{ - shouldBeAuthed: pathShouldBeAuthed, - shouldBeUnauthedReadList: pathShouldBeUnauthedReadList, - shouldBeUnauthedWriteOnly: pathShouldBeUnauthedWriteOnly, -} - -func TestProperAuthing(t *testing.T) { - t.Parallel() - coreConfig := &vault.CoreConfig{ - LogicalBackends: map[string]logical.Factory{ - "ssh": Factory, - }, - } - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - defer cluster.Cleanup() - client := cluster.Cores[0].Client - token := client.Token() - - // Mount SSH. - err := client.Sys().MountWithContext(ctx, "ssh", &api.MountInput{ - Type: "ssh", - Config: api.MountConfigInput{ - DefaultLeaseTTL: "16h", - MaxLeaseTTL: "60h", - }, - }) - if err != nil { - t.Fatal(err) - } - - // Setup basic configuration. 
-	_, err = client.Logical().WriteWithContext(ctx, "ssh/config/ca", map[string]interface{}{
-		"generate_signing_key": true,
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	_, err = client.Logical().WriteWithContext(ctx, "ssh/roles/test-ca", map[string]interface{}{
-		"key_type":                "ca",
-		"allow_user_certificates": true,
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	_, err = client.Logical().WriteWithContext(ctx, "ssh/issue/test-ca", map[string]interface{}{
-		"username": "toor",
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	_, err = client.Logical().WriteWithContext(ctx, "ssh/roles/test-otp", map[string]interface{}{
-		"key_type":     "otp",
-		"default_user": "toor",
-		"cidr_list":    "127.0.0.0/24",
-	})
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	resp, err := client.Logical().WriteWithContext(ctx, "ssh/creds/test-otp", map[string]interface{}{
-		"username": "toor",
-		"ip":       "127.0.0.1",
-	})
-	if err != nil || resp == nil {
-		t.Fatal(err)
-	}
-	// key := resp.Data["key"].(string)
-
-	paths := map[string]pathAuthChecker{
-		"config/ca":          shouldBeAuthed,
-		"config/zeroaddress": shouldBeAuthed,
-		"creds/test-otp":     shouldBeAuthed,
-		"issue/test-ca":      shouldBeAuthed,
-		"lookup":             shouldBeAuthed,
-		"public_key":         shouldBeUnauthedReadList,
-		"roles/test-ca":      shouldBeAuthed,
-		"roles/test-otp":     shouldBeAuthed,
-		"roles":              shouldBeAuthed,
-		"sign/test-ca":       shouldBeAuthed,
-		"tidy/dynamic-keys":  shouldBeAuthed,
-		"verify":             shouldBeUnauthedWriteOnly,
-	}
-	for path, checkerType := range paths {
-		checker := pathAuthChckerMap[checkerType]
-		checker(t, client, "ssh/"+path, token)
-	}
-
-	client.SetToken(token)
-	openAPIResp, err := client.Logical().ReadWithContext(ctx, "sys/internal/specs/openapi")
-	if err != nil {
-		t.Fatalf("failed to get openapi data: %v", err)
-	}
-
-	if len(openAPIResp.Data["paths"].(map[string]interface{})) == 0 {
-		t.Fatalf("expected to get response from OpenAPI; got empty path list")
-	}
-
-	validatedPath := false
-	for openapi_path, raw_data := range openAPIResp.Data["paths"].(map[string]interface{}) {
-		if !strings.HasPrefix(openapi_path, "/ssh/") {
-			t.Logf("Skipping path: %v", openapi_path)
-			continue
-		}
-
-		t.Logf("Validating path: %v", openapi_path)
-		validatedPath = true
-
-		// Substitute values in from our testing map.
-		raw_path := openapi_path[5:]
-		if strings.Contains(raw_path, "{role}") && strings.Contains(raw_path, "roles/") {
-			raw_path = strings.ReplaceAll(raw_path, "{role}", "test-ca")
-		}
-		if strings.Contains(raw_path, "{role}") && (strings.Contains(raw_path, "sign/") || strings.Contains(raw_path, "issue/")) {
-			raw_path = strings.ReplaceAll(raw_path, "{role}", "test-ca")
-		}
-		if strings.Contains(raw_path, "{role}") && strings.Contains(raw_path, "creds") {
-			raw_path = strings.ReplaceAll(raw_path, "{role}", "test-otp")
-		}
-
-		handler, present := paths[raw_path]
-		if !present {
-			t.Fatalf("OpenAPI reports SSH mount contains %v->%v but was not tested to be authed or unauthed.", openapi_path, raw_path)
-		}
-
-		openapi_data := raw_data.(map[string]interface{})
-		hasList := false
-		rawGetData, hasGet := openapi_data["get"]
-		if hasGet {
-			getData := rawGetData.(map[string]interface{})
-			getParams, paramsPresent := getData["parameters"].(map[string]interface{})
-			if getParams != nil && paramsPresent {
-				if _, hasList = getParams["list"]; hasList {
-					// LIST is exclusive from GET on the same endpoint usually.
- hasGet = false - } - } - } - _, hasPost := openapi_data["post"] - _, hasDelete := openapi_data["delete"] - - if handler == shouldBeUnauthedReadList { - if hasPost || hasDelete { - t.Fatalf("Unauthed read-only endpoints should not have POST/DELETE capabilities") - } - } - } - - if !validatedPath { - t.Fatalf("Expected to have validated at least one path.") - } -} diff --git a/builtin/logical/ssh/cmd/ssh/main.go b/builtin/logical/ssh/cmd/ssh/main.go index a9cf8b26969689..d04bd30af67e2c 100644 --- a/builtin/logical/ssh/cmd/ssh/main.go +++ b/builtin/logical/ssh/cmd/ssh/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: ssh.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/ssh/path_cleanup_dynamic_host_keys.go b/builtin/logical/ssh/path_cleanup_dynamic_host_keys.go index 5ae2afc8e88d36..4318e0b014085b 100644 --- a/builtin/logical/ssh/path_cleanup_dynamic_host_keys.go +++ b/builtin/logical/ssh/path_cleanup_dynamic_host_keys.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -16,11 +13,6 @@ const keysStoragePrefix = "keys/" func pathCleanupKeys(b *backend) *framework.Path { return &framework.Path{ Pattern: "tidy/dynamic-keys", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationVerb: "tidy", - OperationSuffix: "dynamic-host-keys", - }, Callbacks: map[logical.Operation]framework.OperationFunc{ logical.DeleteOperation: b.handleCleanupKeys, }, diff --git a/builtin/logical/ssh/path_config_ca.go b/builtin/logical/ssh/path_config_ca.go index 3fa890c29358df..2fd76c12f9109c 100644 --- a/builtin/logical/ssh/path_config_ca.go +++ b/builtin/logical/ssh/path_config_ca.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -41,11 +38,6 @@ type keyStorageEntry struct { func pathConfigCA(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/ca", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - }, - Fields: map[string]*framework.FieldSchema{ "private_key": { Type: framework.TypeString, @@ -72,26 +64,10 @@ func pathConfigCA(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigCAUpdate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "ca", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathConfigCADelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "ca-configuration", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigCARead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "ca-configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathConfigCAUpdate, + logical.DeleteOperation: b.pathConfigCADelete, + logical.ReadOperation: b.pathConfigCARead, }, HelpSynopsis: `Set the SSH private key used for signing certificates.`, diff --git a/builtin/logical/ssh/path_config_ca_test.go b/builtin/logical/ssh/path_config_ca_test.go index 4c33fc80892dc7..651ed42ce0fb01 100644 --- a/builtin/logical/ssh/path_config_ca_test.go +++ b/builtin/logical/ssh/path_config_ca_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( diff --git a/builtin/logical/ssh/path_config_zeroaddress.go b/builtin/logical/ssh/path_config_zeroaddress.go index 773e9b36fbe0ac..d1e31e234df124 100644 --- a/builtin/logical/ssh/path_config_zeroaddress.go +++ b/builtin/logical/ssh/path_config_zeroaddress.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -21,11 +18,6 @@ type zeroAddressRoles struct { func pathConfigZeroAddress(b *backend) *framework.Path { return &framework.Path{ Pattern: "config/zeroaddress", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - }, - Fields: map[string]*framework.FieldSchema{ "roles": { Type: framework.TypeCommaStringSlice, @@ -34,27 +26,10 @@ func pathConfigZeroAddress(b *backend) *framework.Path { previously registered under these roles will be ignored.`, }, }, - - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigZeroAddressWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "zero-address", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigZeroAddressRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "zero-address-configuration", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathConfigZeroAddressDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "zero-address-configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathConfigZeroAddressWrite, + logical.ReadOperation: b.pathConfigZeroAddressRead, + logical.DeleteOperation: b.pathConfigZeroAddressDelete, }, HelpSynopsis: pathConfigZeroAddressSyn, HelpDescription: pathConfigZeroAddressDesc, diff --git a/builtin/logical/ssh/path_creds_create.go b/builtin/logical/ssh/path_creds_create.go index b95c1f3752e170..d8b8f8bbff6939 100644 --- a/builtin/logical/ssh/path_creds_create.go +++ b/builtin/logical/ssh/path_creds_create.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -23,13 +20,6 @@ type sshOTP struct { func pathCredsCreate(b *backend) *framework.Path { return &framework.Path{ Pattern: "creds/" + framework.GenericNameWithAtRegex("role"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationVerb: "generate", - OperationSuffix: "credentials", - }, - Fields: map[string]*framework.FieldSchema{ "role": { Type: framework.TypeString, @@ -44,11 +34,9 @@ func pathCredsCreate(b *backend) *framework.Path { Description: "[Required] IP of the remote host", }, }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.UpdateOperation: b.pathCredsCreateWrite, }, - HelpSynopsis: pathCredsCreateHelpSyn, HelpDescription: pathCredsCreateHelpDesc, } diff --git a/builtin/logical/ssh/path_fetch.go b/builtin/logical/ssh/path_fetch.go index 3a1fa5f297d4e2..2f45c1c3591523 100644 --- a/builtin/logical/ssh/path_fetch.go +++ b/builtin/logical/ssh/path_fetch.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -14,11 +11,6 @@ func pathFetchPublicKey(b *backend) *framework.Path { return &framework.Path{ Pattern: `public_key`, - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationSuffix: "public-key", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ReadOperation: b.pathFetchPublicKey, }, diff --git a/builtin/logical/ssh/path_issue.go b/builtin/logical/ssh/path_issue.go index b50e03ed777114..77b644590fd042 100644 --- a/builtin/logical/ssh/path_issue.go +++ b/builtin/logical/ssh/path_issue.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -22,12 +19,6 @@ func pathIssue(b *backend) *framework.Path { return &framework.Path{ Pattern: "issue/" + framework.GenericNameWithAtRegex("role"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationVerb: "issue", - OperationSuffix: "certificate", - }, - Operations: map[logical.Operation]framework.OperationHandler{ logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathIssue, diff --git a/builtin/logical/ssh/path_issue_sign.go b/builtin/logical/ssh/path_issue_sign.go index c4e68e4721f09c..0ce45d51899170 100644 --- a/builtin/logical/ssh/path_issue_sign.go +++ b/builtin/logical/ssh/path_issue_sign.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -501,7 +498,7 @@ func (b *creationBundle) sign() (retCert *ssh.Certificate, retErr error) { // prepare certificate for signing nonce := make([]byte, 32) if _, err := io.ReadFull(rand.Reader, nonce); err != nil { - return nil, fmt.Errorf("failed to generate signed SSH key: error generating random nonce: %w", err) + return nil, fmt.Errorf("failed to generate signed SSH key: error generating random nonce") } certificate := &ssh.Certificate{ Serial: serialNumber.Uint64(), diff --git a/builtin/logical/ssh/path_lookup.go b/builtin/logical/ssh/path_lookup.go index 8ea0b53ac127c3..05b62af96afd75 100644 --- a/builtin/logical/ssh/path_lookup.go +++ b/builtin/logical/ssh/path_lookup.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -15,24 +12,15 @@ import ( func pathLookup(b *backend) *framework.Path { return &framework.Path{ Pattern: "lookup", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationVerb: "list", - OperationSuffix: "roles-by-ip", - }, - Fields: map[string]*framework.FieldSchema{ "ip": { Type: framework.TypeString, Description: "[Required] IP address of remote host", }, }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.UpdateOperation: b.pathLookupWrite, }, - HelpSynopsis: pathLookupSyn, HelpDescription: pathLookupDesc, } diff --git a/builtin/logical/ssh/path_roles.go b/builtin/logical/ssh/path_roles.go index b16c1d762cdec3..a41772578f0c3f 100644 --- a/builtin/logical/ssh/path_roles.go +++ b/builtin/logical/ssh/path_roles.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -72,11 +69,6 @@ func pathListRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationSuffix: "roles", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathRoleList, }, @@ -89,12 +81,6 @@ func pathListRoles(b *backend) *framework.Path { func pathRoles(b *backend) *framework.Path { return &framework.Path{ Pattern: "roles/" + framework.GenericNameWithAtRegex("role"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationSuffix: "role", - }, - Fields: map[string]*framework.FieldSchema{ "role": { Type: framework.TypeString, diff --git a/builtin/logical/ssh/path_sign.go b/builtin/logical/ssh/path_sign.go index 36971ebf4933df..19196013e6d50c 100644 --- a/builtin/logical/ssh/path_sign.go +++ b/builtin/logical/ssh/path_sign.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -15,12 +12,6 @@ func pathSign(b *backend) *framework.Path { return &framework.Path{ Pattern: "sign/" + framework.GenericNameWithAtRegex("role"), - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationVerb: "sign", - OperationSuffix: "certificate", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.UpdateOperation: b.pathSign, }, diff --git a/builtin/logical/ssh/path_verify.go b/builtin/logical/ssh/path_verify.go index 906272281e5683..7d9814751fd124 100644 --- a/builtin/logical/ssh/path_verify.go +++ b/builtin/logical/ssh/path_verify.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( @@ -14,11 +11,6 @@ import ( func pathVerify(b *backend) *framework.Path { return &framework.Path{ Pattern: "verify", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixSSH, - OperationVerb: "verify", - OperationSuffix: "otp", - }, Fields: map[string]*framework.FieldSchema{ "otp": { Type: framework.TypeString, diff --git a/builtin/logical/ssh/secret_otp.go b/builtin/logical/ssh/secret_otp.go index a70cf601cfe503..72e9903f16bb1f 100644 --- a/builtin/logical/ssh/secret_otp.go +++ b/builtin/logical/ssh/secret_otp.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( diff --git a/builtin/logical/ssh/util.go b/builtin/logical/ssh/util.go index b8867506790765..9eabfa1d8613e4 100644 --- a/builtin/logical/ssh/util.go +++ b/builtin/logical/ssh/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ssh import ( diff --git a/builtin/logical/totp/backend.go b/builtin/logical/totp/backend.go index 5f0cb52c517529..d2494b499549c1 100644 --- a/builtin/logical/totp/backend.go +++ b/builtin/logical/totp/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package totp import ( @@ -13,8 +10,6 @@ import ( cache "github.com/patrickmn/go-cache" ) -const operationPrefixTOTP = "totp" - func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b := Backend() if err := b.Setup(ctx, conf); err != nil { diff --git a/builtin/logical/totp/backend_test.go b/builtin/logical/totp/backend_test.go index 12600427f27f04..0b68599df64c73 100644 --- a/builtin/logical/totp/backend_test.go +++ b/builtin/logical/totp/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package totp import ( diff --git a/builtin/logical/totp/cmd/totp/main.go b/builtin/logical/totp/cmd/totp/main.go index 9a2a49bd275511..4c96df7f314662 100644 --- a/builtin/logical/totp/cmd/totp/main.go +++ b/builtin/logical/totp/cmd/totp/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: totp.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/totp/path_code.go b/builtin/logical/totp/path_code.go index c792a2905ef013..af56f37da689df 100644 --- a/builtin/logical/totp/path_code.go +++ b/builtin/logical/totp/path_code.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package totp import ( @@ -17,12 +14,6 @@ import ( func pathCode(b *backend) *framework.Path { return &framework.Path{ Pattern: "code/" + framework.GenericNameWithAtRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTOTP, - OperationSuffix: "code", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -34,19 +25,9 @@ func pathCode(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathReadCode, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "generate", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathValidateCode, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "validate", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathReadCode, + logical.UpdateOperation: b.pathValidateCode, }, HelpSynopsis: pathCodeHelpSyn, diff --git a/builtin/logical/totp/path_keys.go b/builtin/logical/totp/path_keys.go index 05e8e5aecf5b5f..d7f7f2abe323bb 100644 --- a/builtin/logical/totp/path_keys.go +++ b/builtin/logical/totp/path_keys.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package totp import ( @@ -24,11 +21,6 @@ func pathListKeys(b *backend) *framework.Path { return &framework.Path{ Pattern: "keys/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTOTP, - OperationSuffix: "keys", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathKeyList, }, @@ -41,12 +33,6 @@ func pathListKeys(b *backend) *framework.Path { func pathKeys(b *backend) *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameWithAtRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTOTP, - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -122,25 +108,10 @@ func pathKeys(b *backend) *framework.Path { }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathKeyRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "read", - }, - }, - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathKeyCreate, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "create", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathKeyDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "delete", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.pathKeyRead, + logical.UpdateOperation: b.pathKeyCreate, + logical.DeleteOperation: b.pathKeyDelete, }, HelpSynopsis: pathKeyHelpSyn, diff --git a/builtin/logical/transit/backend.go b/builtin/logical/transit/backend.go index 03c3b2fda4e427..05e2a043d81a39 100644 --- a/builtin/logical/transit/backend.go +++ b/builtin/logical/transit/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -19,12 +16,8 @@ import ( "github.com/hashicorp/vault/sdk/logical" ) -const ( - operationPrefixTransit = "transit" - - // Minimum cache size for transit backend - minCacheSize = 10 -) +// Minimum cache size for transit backend +const minCacheSize = 10 func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { b, err := Backend(ctx, conf) @@ -57,7 +50,6 @@ func Backend(ctx context.Context, conf *logical.BackendConfig) (*backend, error) b.pathImportVersion(), b.pathKeys(), b.pathListKeys(), - b.pathBYOKExportKeys(), b.pathExportKeys(), b.pathKeysConfig(), b.pathEncrypt(), diff --git a/builtin/logical/transit/backend_test.go b/builtin/logical/transit/backend_test.go index d23c19465ff7f5..71cbfb641d82f7 100644 --- a/builtin/logical/transit/backend_test.go +++ b/builtin/logical/transit/backend_test.go @@ -1,12 +1,8 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( "context" "crypto" - "crypto/ed25519" cryptoRand "crypto/rand" "crypto/x509" "encoding/base64" @@ -1072,7 +1068,9 @@ func testConvergentEncryptionCommon(t *testing.T, ver int, keyType keysutil.KeyT if err != nil { t.Fatal(err) } - require.NotNil(t, resp, "expected populated request") + if resp != nil { + t.Fatal("expected nil response") + } p, err := keysutil.LoadPolicy(context.Background(), storage, path.Join("policy", "testkey")) if err != nil { @@ -1558,7 +1556,9 @@ func TestBadInput(t *testing.T) { if err != nil { t.Fatal(err) } - require.NotNil(t, resp, "expected populated request") + if resp != nil { + t.Fatal("expected nil response") + } req.Path = "decrypt/test" req.Data = map[string]interface{}{ @@ -1647,7 +1647,9 @@ func TestTransit_AutoRotateKeys(t *testing.T) { if err != nil { t.Fatal(err) } - require.NotNil(t, resp, "expected populated request") + if resp != nil { + t.Fatal("expected nil response") + } // Write a key with an auto rotate value one day in the future req = &logical.Request{ @@ -1662,7 +1664,9 @@ func TestTransit_AutoRotateKeys(t *testing.T) { if err != nil { t.Fatal(err) } - require.NotNil(t, resp, "expected populated request") + if resp != nil { + t.Fatal("expected nil response") + } // Run the rotation check and ensure none of the keys have rotated b.checkAutoRotateAfter = time.Now() @@ -2015,284 +2019,3 @@ func TestTransitPKICSR(t *testing.T) { t.Logf("root: %v", rootCertPEM) t.Logf("leaf: %v", leafCertPEM) } - -func TestTransit_ReadPublicKeyImported(t *testing.T) { - testTransit_ReadPublicKeyImported(t, "rsa-2048") - testTransit_ReadPublicKeyImported(t, "ecdsa-p256") - testTransit_ReadPublicKeyImported(t, "ed25519") -} - -func testTransit_ReadPublicKeyImported(t *testing.T, keyType string) { - generateKeys(t) - b, s := createBackendWithStorage(t) - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get key - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatalf("failed to extract the public key: %s", err) - } - - // Import key - importReq := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - importResp, err := b.HandleRequest(context.Background(), importReq) - if err != nil || (importResp != nil && importResp.IsError()) { - t.Fatalf("failed to import public key. err: %s\nresp: %#v", err, importResp) - } - - // Read key - readReq := &logical.Request{ - Operation: logical.ReadOperation, - Path: "keys/" + keyID, - Storage: s, - } - - readResp, err := b.HandleRequest(context.Background(), readReq) - if err != nil || (readResp != nil && readResp.IsError()) { - t.Fatalf("failed to read key. 
err: %s\nresp: %#v", err, readResp) - } -} - -func TestTransit_SignWithImportedPublicKey(t *testing.T) { - testTransit_SignWithImportedPublicKey(t, "rsa-2048") - testTransit_SignWithImportedPublicKey(t, "ecdsa-p256") - testTransit_SignWithImportedPublicKey(t, "ed25519") -} - -func testTransit_SignWithImportedPublicKey(t *testing.T, keyType string) { - generateKeys(t) - b, s := createBackendWithStorage(t) - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get key - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatalf("failed to extract the public key: %s", err) - } - - // Import key - importReq := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - importResp, err := b.HandleRequest(context.Background(), importReq) - if err != nil || (importResp != nil && importResp.IsError()) { - t.Fatalf("failed to import public key. err: %s\nresp: %#v", err, importResp) - } - - // Sign text - signReq := &logical.Request{ - Path: "sign/" + keyID, - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "plaintext": base64.StdEncoding.EncodeToString([]byte(testPlaintext)), - }, - } - - _, err = b.HandleRequest(context.Background(), signReq) - if err == nil { - t.Fatalf("expected error, should have failed to sign input") - } -} - -func TestTransit_VerifyWithImportedPublicKey(t *testing.T) { - generateKeys(t) - keyType := "rsa-2048" - b, s := createBackendWithStorage(t) - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get key - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Retrieve public wrapping key - wrappingKey, err := b.getWrappingKey(context.Background(), s) - if err != nil || wrappingKey == nil { - t.Fatalf("failed to retrieve public wrapping key: %s", err) - } - - privWrappingKey := wrappingKey.Keys[strconv.Itoa(wrappingKey.LatestVersion)].RSAKey - pubWrappingKey := &privWrappingKey.PublicKey - - // generate ciphertext - importBlob := wrapTargetKeyForImport(t, pubWrappingKey, privateKey, keyType, "SHA256") - - // Import private key - importReq := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "ciphertext": importBlob, - "type": keyType, - }, - } - importResp, err := b.HandleRequest(context.Background(), importReq) - if err != nil || (importResp != nil && importResp.IsError()) { - t.Fatalf("failed to import key. err: %s\nresp: %#v", err, importResp) - } - - // Sign text - signReq := &logical.Request{ - Storage: s, - Path: "sign/" + keyID, - Operation: logical.UpdateOperation, - Data: map[string]interface{}{ - "plaintext": base64.StdEncoding.EncodeToString([]byte(testPlaintext)), - }, - } - - signResp, err := b.HandleRequest(context.Background(), signReq) - if err != nil || (signResp != nil && signResp.IsError()) { - t.Fatalf("failed to sign plaintext. 
err: %s\nresp: %#v", err, signResp) - } - - // Get signature - signature := signResp.Data["signature"].(string) - - // Import new key as public key - importPubReq := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", "public-key-rsa"), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - importPubResp, err := b.HandleRequest(context.Background(), importPubReq) - if err != nil || (importPubResp != nil && importPubResp.IsError()) { - t.Fatalf("failed to import public key. err: %s\nresp: %#v", err, importPubResp) - } - - // Verify signed text - verifyReq := &logical.Request{ - Path: "verify/public-key-rsa", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "input": base64.StdEncoding.EncodeToString([]byte(testPlaintext)), - "signature": signature, - }, - } - - verifyResp, err := b.HandleRequest(context.Background(), verifyReq) - if err != nil || (importResp != nil && verifyResp.IsError()) { - t.Fatalf("failed to verify signed data. err: %s\nresp: %#v", err, importResp) - } -} - -func TestTransit_ExportPublicKeyImported(t *testing.T) { - testTransit_ExportPublicKeyImported(t, "rsa-2048") - testTransit_ExportPublicKeyImported(t, "ecdsa-p256") - testTransit_ExportPublicKeyImported(t, "ed25519") -} - -func testTransit_ExportPublicKeyImported(t *testing.T, keyType string) { - generateKeys(t) - b, s := createBackendWithStorage(t) - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get key - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatalf("failed to extract the public key: %s", err) - } - - t.Logf("generated key: %v", string(publicKeyBytes)) - - // Import key - importReq := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - "exportable": true, - }, - } - importResp, err := b.HandleRequest(context.Background(), importReq) - if err != nil || (importResp != nil && importResp.IsError()) { - t.Fatalf("failed to import public key. err: %s\nresp: %#v", err, importResp) - } - - t.Logf("importing key: %v", importResp) - - // Export key - exportReq := &logical.Request{ - Operation: logical.ReadOperation, - Path: fmt.Sprintf("export/public-key/%s/latest", keyID), - Storage: s, - } - - exportResp, err := b.HandleRequest(context.Background(), exportReq) - if err != nil || (exportResp != nil && exportResp.IsError()) { - t.Fatalf("failed to export key. 
err: %v\nresp: %#v", err, exportResp) - } - - t.Logf("exporting key: %v", exportResp) - - responseKeys, exist := exportResp.Data["keys"] - if !exist { - t.Fatal("expected response data to hold a 'keys' field") - } - - exportedKeyBytes := responseKeys.(map[string]string)["1"] - - if keyType != "ed25519" { - exportedKeyBlock, _ := pem.Decode([]byte(exportedKeyBytes)) - publicKeyBlock, _ := pem.Decode(publicKeyBytes) - - if !reflect.DeepEqual(publicKeyBlock.Bytes, exportedKeyBlock.Bytes) { - t.Fatalf("exported key bytes should have matched with imported key for key type: %v\nexported: %v\nimported: %v", keyType, exportedKeyBlock.Bytes, publicKeyBlock.Bytes) - } - } else { - exportedKey, err := base64.StdEncoding.DecodeString(exportedKeyBytes) - if err != nil { - t.Fatalf("error decoding exported key bytes (%v) to base64 for key type %v: %v", exportedKeyBytes, keyType, err) - } - - publicKeyBlock, _ := pem.Decode(publicKeyBytes) - publicKeyParsed, err := x509.ParsePKIXPublicKey(publicKeyBlock.Bytes) - if err != nil { - t.Fatalf("error decoding source key bytes (%v) from PKIX marshaling for key type %v: %v", publicKeyBlock.Bytes, keyType, err) - } - - if !reflect.DeepEqual([]byte(publicKeyParsed.(ed25519.PublicKey)), exportedKey) { - t.Fatalf("exported key bytes should have matched with imported key for key type: %v\nexported: %v\nimported: %v", keyType, exportedKey, publicKeyParsed) - } - } -} diff --git a/builtin/logical/transit/cmd/transit/main.go b/builtin/logical/transit/cmd/transit/main.go index 7e2ae8777bdc51..25d4675b908303 100644 --- a/builtin/logical/transit/cmd/transit/main.go +++ b/builtin/logical/transit/cmd/transit/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( @@ -20,11 +17,9 @@ func main() { tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := api.VaultPluginTLSProvider(tlsConfig) - if err := plugin.ServeMultiplex(&plugin.ServeOpts{ + if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: transit.Factory, - // set the TLSProviderFunc so that the plugin maintains backwards - // compatibility with Vault versions that don’t support plugin AutoMTLS - TLSProviderFunc: tlsProviderFunc, + TLSProviderFunc: tlsProviderFunc, }); err != nil { logger := hclog.New(&hclog.LoggerOptions{}) diff --git a/builtin/logical/transit/managed_key_util.go b/builtin/logical/transit/managed_key_util.go index c4dc1e9fe0d371..629714e68ea067 100644 --- a/builtin/logical/transit/managed_key_util.go +++ b/builtin/logical/transit/managed_key_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package transit diff --git a/builtin/logical/transit/path_backup.go b/builtin/logical/transit/path_backup.go index 93833423387d75..ef13f0aab88cd6 100644 --- a/builtin/logical/transit/path_backup.go +++ b/builtin/logical/transit/path_backup.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -13,13 +10,6 @@ import ( func (b *backend) pathBackup() *framework.Path { return &framework.Path{ Pattern: "backup/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "back-up", - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_backup_test.go b/builtin/logical/transit/path_backup_test.go index 3627d6b702a773..d8a54d70fcc13f 100644 --- a/builtin/logical/transit/path_backup_test.go +++ b/builtin/logical/transit/path_backup_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_byok.go b/builtin/logical/transit/path_byok.go deleted file mode 100644 index 40f7cac1a472e8..00000000000000 --- a/builtin/logical/transit/path_byok.go +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package transit - -import ( - "context" - "crypto/ecdsa" - "crypto/ed25519" - "crypto/elliptic" - "errors" - "fmt" - "strconv" - "strings" - - "github.com/hashicorp/vault/sdk/framework" - "github.com/hashicorp/vault/sdk/helper/keysutil" - "github.com/hashicorp/vault/sdk/logical" -) - -func (b *backend) pathBYOKExportKeys() *framework.Path { - return &framework.Path{ - Pattern: "byok-export/" + framework.GenericNameRegex("destination") + "/" + framework.GenericNameRegex("source") + framework.OptionalParamRegex("version"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "byok", - OperationSuffix: "key|key-version", - }, - - Fields: map[string]*framework.FieldSchema{ - "destination": { - Type: framework.TypeString, - Description: "Destination key to export to; usually the public wrapping key of another Transit instance.", - }, - "source": { - Type: framework.TypeString, - Description: "Source key to export; could be any present key within Transit.", - }, - "version": { - Type: framework.TypeString, - Description: "Optional version of the key to export, else all key versions are exported.", - }, - "hash": { - Type: framework.TypeString, - Description: "Hash function to use for inner OAEP encryption. 
Defaults to SHA256.", - Default: "SHA256", - }, - }, - - Callbacks: map[logical.Operation]framework.OperationFunc{ - logical.ReadOperation: b.pathPolicyBYOKExportRead, - }, - - HelpSynopsis: pathBYOKExportHelpSyn, - HelpDescription: pathBYOKExportHelpDesc, - } -} - -func (b *backend) pathPolicyBYOKExportRead(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) { - dst := d.Get("destination").(string) - src := d.Get("source").(string) - version := d.Get("version").(string) - hash := d.Get("hash").(string) - - dstP, _, err := b.GetPolicy(ctx, keysutil.PolicyRequest{ - Storage: req.Storage, - Name: dst, - }, b.GetRandomReader()) - if err != nil { - return nil, err - } - if dstP == nil { - return nil, fmt.Errorf("no such destination key to export to") - } - if !b.System().CachingDisabled() { - dstP.Lock(false) - } - defer dstP.Unlock() - - srcP, _, err := b.GetPolicy(ctx, keysutil.PolicyRequest{ - Storage: req.Storage, - Name: src, - }, b.GetRandomReader()) - if err != nil { - return nil, err - } - if srcP == nil { - return nil, fmt.Errorf("no such source key for export") - } - if !b.System().CachingDisabled() { - srcP.Lock(false) - } - defer srcP.Unlock() - - if !srcP.Exportable { - return logical.ErrorResponse("key is not exportable"), nil - } - - retKeys := map[string]string{} - switch version { - case "": - for k, v := range srcP.Keys { - exportKey, err := getBYOKExportKey(dstP, srcP, &v, hash) - if err != nil { - return nil, err - } - retKeys[k] = exportKey - } - - default: - var versionValue int - if version == "latest" { - versionValue = srcP.LatestVersion - } else { - version = strings.TrimPrefix(version, "v") - versionValue, err = strconv.Atoi(version) - if err != nil { - return logical.ErrorResponse("invalid key version"), logical.ErrInvalidRequest - } - } - - if versionValue < srcP.MinDecryptionVersion { - return logical.ErrorResponse("version for export is below minimum decryption version"), logical.ErrInvalidRequest - } - key, ok := srcP.Keys[strconv.Itoa(versionValue)] - if !ok { - return logical.ErrorResponse("version does not exist or cannot be found"), logical.ErrInvalidRequest - } - - exportKey, err := getBYOKExportKey(dstP, srcP, &key, hash) - if err != nil { - return nil, err - } - - retKeys[strconv.Itoa(versionValue)] = exportKey - } - - resp := &logical.Response{ - Data: map[string]interface{}{ - "name": srcP.Name, - "type": srcP.Type.String(), - "keys": retKeys, - }, - } - - return resp, nil -} - -func getBYOKExportKey(dstP *keysutil.Policy, srcP *keysutil.Policy, key *keysutil.KeyEntry, hash string) (string, error) { - if dstP == nil || srcP == nil { - return "", errors.New("nil policy provided") - } - - var targetKey interface{} - switch srcP.Type { - case keysutil.KeyType_AES128_GCM96, keysutil.KeyType_AES256_GCM96, keysutil.KeyType_ChaCha20_Poly1305, keysutil.KeyType_HMAC: - targetKey = key.Key - case keysutil.KeyType_RSA2048, keysutil.KeyType_RSA3072, keysutil.KeyType_RSA4096: - targetKey = key.RSAKey - case keysutil.KeyType_ECDSA_P256, keysutil.KeyType_ECDSA_P384, keysutil.KeyType_ECDSA_P521: - var curve elliptic.Curve - switch srcP.Type { - case keysutil.KeyType_ECDSA_P384: - curve = elliptic.P384() - case keysutil.KeyType_ECDSA_P521: - curve = elliptic.P521() - default: - curve = elliptic.P256() - } - pubKey := ecdsa.PublicKey{ - Curve: curve, - X: key.EC_X, - Y: key.EC_Y, - } - targetKey = &ecdsa.PrivateKey{ - PublicKey: pubKey, - D: key.EC_D, - } - case keysutil.KeyType_ED25519: - targetKey = ed25519.PrivateKey(key.Key) 
- default: - return "", fmt.Errorf("unable to export to unknown key type: %v", srcP.Type) - } - - hasher, err := parseHashFn(hash) - if err != nil { - return "", err - } - - return dstP.WrapKey(0, targetKey, srcP.Type, hasher) -} - -const pathBYOKExportHelpSyn = `Securely export named encryption or signing key` - -const pathBYOKExportHelpDesc = ` -This path is used to export the named keys that are configured as -exportable. - -Unlike the regular /export/:name[/:version] paths, this path uses -the same encryption specification /import, allowing secure migration -of keys between clusters to enable workloads to communicate between -them. - -Presently this only works for RSA destination keys. -` diff --git a/builtin/logical/transit/path_byok_test.go b/builtin/logical/transit/path_byok_test.go deleted file mode 100644 index a05a719ea4f21c..00000000000000 --- a/builtin/logical/transit/path_byok_test.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package transit - -import ( - "context" - "testing" - - "github.com/hashicorp/vault/sdk/logical" -) - -func TestTransit_BYOKExportImport(t *testing.T) { - // Test encryption/decryption after a restore for supported keys - testBYOKExportImport(t, "aes128-gcm96", "encrypt-decrypt") - testBYOKExportImport(t, "aes256-gcm96", "encrypt-decrypt") - testBYOKExportImport(t, "chacha20-poly1305", "encrypt-decrypt") - testBYOKExportImport(t, "rsa-2048", "encrypt-decrypt") - testBYOKExportImport(t, "rsa-3072", "encrypt-decrypt") - testBYOKExportImport(t, "rsa-4096", "encrypt-decrypt") - - // Test signing/verification after a restore for supported keys - testBYOKExportImport(t, "ecdsa-p256", "sign-verify") - testBYOKExportImport(t, "ecdsa-p384", "sign-verify") - testBYOKExportImport(t, "ecdsa-p521", "sign-verify") - testBYOKExportImport(t, "ed25519", "sign-verify") - testBYOKExportImport(t, "rsa-2048", "sign-verify") - testBYOKExportImport(t, "rsa-3072", "sign-verify") - testBYOKExportImport(t, "rsa-4096", "sign-verify") - - // Test HMAC sign/verify after a restore for supported keys. - testBYOKExportImport(t, "hmac", "hmac-verify") -} - -func testBYOKExportImport(t *testing.T, keyType, feature string) { - var resp *logical.Response - var err error - - b, s := createBackendWithStorage(t) - - // Create a key - keyReq := &logical.Request{ - Path: "keys/test-source", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "type": keyType, - "exportable": true, - }, - } - if keyType == "hmac" { - keyReq.Data["key_size"] = 32 - } - resp, err = b.HandleRequest(context.Background(), keyReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - - // Read the wrapping key. - wrapKeyReq := &logical.Request{ - Path: "wrapping_key", - Operation: logical.ReadOperation, - Storage: s, - } - resp, err = b.HandleRequest(context.Background(), wrapKeyReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - - // Import the wrapping key. 
- wrapKeyImportReq := &logical.Request{ - Path: "keys/wrapper/import", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "public_key": resp.Data["public_key"], - "type": "rsa-4096", - }, - } - resp, err = b.HandleRequest(context.Background(), wrapKeyImportReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - - // Export the key - backupReq := &logical.Request{ - Path: "byok-export/wrapper/test-source", - Operation: logical.ReadOperation, - Storage: s, - } - resp, err = b.HandleRequest(context.Background(), backupReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - keys := resp.Data["keys"].(map[string]string) - - // Import the key to a new name. - restoreReq := &logical.Request{ - Path: "keys/test/import", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "ciphertext": keys["1"], - "type": keyType, - }, - } - resp, err = b.HandleRequest(context.Background(), restoreReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - - plaintextB64 := "dGhlIHF1aWNrIGJyb3duIGZveA==" // "the quick brown fox" - // Perform encryption, signing or hmac-ing based on the set 'feature' - var encryptReq, signReq, hmacReq *logical.Request - var ciphertext, signature, hmac string - switch feature { - case "encrypt-decrypt": - encryptReq = &logical.Request{ - Path: "encrypt/test-source", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "plaintext": plaintextB64, - }, - } - resp, err = b.HandleRequest(context.Background(), encryptReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - ciphertext = resp.Data["ciphertext"].(string) - - case "sign-verify": - signReq = &logical.Request{ - Path: "sign/test-source", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "input": plaintextB64, - }, - } - resp, err = b.HandleRequest(context.Background(), signReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - signature = resp.Data["signature"].(string) - - case "hmac-verify": - hmacReq = &logical.Request{ - Path: "hmac/test-source", - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "input": plaintextB64, - }, - } - resp, err = b.HandleRequest(context.Background(), hmacReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - hmac = resp.Data["hmac"].(string) - } - - // validationFunc verifies the ciphertext, signature or hmac based on the - // set 'feature' - validationFunc := func(keyName string) { - var decryptReq *logical.Request - var verifyReq *logical.Request - switch feature { - case "encrypt-decrypt": - decryptReq = &logical.Request{ - Path: "decrypt/" + keyName, - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "ciphertext": ciphertext, - }, - } - resp, err = b.HandleRequest(context.Background(), decryptReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - - if resp.Data["plaintext"].(string) != plaintextB64 { - t.Fatalf("bad: plaintext; expected: %q, actual: %q", plaintextB64, resp.Data["plaintext"].(string)) - } - case "sign-verify": - verifyReq = &logical.Request{ - Path: "verify/" + keyName, - Operation: logical.UpdateOperation, - 
Storage: s, - Data: map[string]interface{}{ - "signature": signature, - "input": plaintextB64, - }, - } - resp, err = b.HandleRequest(context.Background(), verifyReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - if resp.Data["valid"].(bool) != true { - t.Fatalf("bad: signature verification failed for key type %q", keyType) - } - - case "hmac-verify": - verifyReq = &logical.Request{ - Path: "verify/" + keyName, - Operation: logical.UpdateOperation, - Storage: s, - Data: map[string]interface{}{ - "hmac": hmac, - "input": plaintextB64, - }, - } - resp, err = b.HandleRequest(context.Background(), verifyReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("resp: %#v\nerr: %v", resp, err) - } - if resp.Data["valid"].(bool) != true { - t.Fatalf("bad: HMAC verification failed for key type %q", keyType) - } - } - } - - // Ensure that the restored key is functional - validationFunc("test") - - // Ensure the original key is functional - validationFunc("test-source") -} diff --git a/builtin/logical/transit/path_cache_config.go b/builtin/logical/transit/path_cache_config.go index f8f0cea551eed4..e7692997668df5 100644 --- a/builtin/logical/transit/path_cache_config.go +++ b/builtin/logical/transit/path_cache_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -14,11 +11,6 @@ import ( func (b *backend) pathCacheConfig() *framework.Path { return &framework.Path{ Pattern: "cache-config", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - }, - Fields: map[string]*framework.FieldSchema{ "size": { Type: framework.TypeInt, @@ -32,27 +24,16 @@ func (b *backend) pathCacheConfig() *framework.Path { logical.ReadOperation: &framework.PathOperation{ Callback: b.pathCacheConfigRead, Summary: "Returns the size of the active cache", - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "cache-configuration", - }, }, logical.UpdateOperation: &framework.PathOperation{ Callback: b.pathCacheConfigWrite, Summary: "Configures a new cache of the specified size", - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "cache", - }, }, logical.CreateOperation: &framework.PathOperation{ Callback: b.pathCacheConfigWrite, Summary: "Configures a new cache of the specified size", - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "cache", - }, }, }, @@ -84,11 +65,7 @@ func (b *backend) pathCacheConfigWrite(ctx context.Context, req *logical.Request return nil, err } - return &logical.Response{ - Data: map[string]interface{}{ - "size": cacheSize, - }, - }, nil + return nil, nil } type configCache struct { diff --git a/builtin/logical/transit/path_cache_config_test.go b/builtin/logical/transit/path_cache_config_test.go index f5c8316d878929..d8e0a7b56d87a5 100644 --- a/builtin/logical/transit/path_cache_config_test.go +++ b/builtin/logical/transit/path_cache_config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_config_keys.go b/builtin/logical/transit/path_config_keys.go index bbf2cc4c07a85e..2294636e395103 100644 --- a/builtin/logical/transit/path_config_keys.go +++ b/builtin/logical/transit/path_config_keys.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -24,11 +21,6 @@ var defaultKeysConfig = keysConfig{ func (b *backend) pathConfigKeys() *framework.Path { return &framework.Path{ Pattern: "config/keys", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - }, - Fields: map[string]*framework.FieldSchema{ "disable_upsert": { Type: framework.TypeBool, @@ -37,20 +29,9 @@ keys on the encrypt endpoint.`, }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathConfigKeysWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "configure", - OperationSuffix: "keys", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathConfigKeysRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationSuffix: "keys-configuration", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathConfigKeysWrite, + logical.ReadOperation: b.pathConfigKeysRead, }, HelpSynopsis: pathConfigKeysHelpSyn, diff --git a/builtin/logical/transit/path_config_keys_test.go b/builtin/logical/transit/path_config_keys_test.go index dde7c58a0a9462..8d8f9f940f28c8 100644 --- a/builtin/logical/transit/path_config_keys_test.go +++ b/builtin/logical/transit/path_config_keys_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_datakey.go b/builtin/logical/transit/path_datakey.go index 1c607d0e2f88e7..42da161916392f 100644 --- a/builtin/logical/transit/path_datakey.go +++ b/builtin/logical/transit/path_datakey.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -19,13 +16,6 @@ import ( func (b *backend) pathDatakey() *framework.Path { return &framework.Path{ Pattern: "datakey/" + framework.GenericNameRegex("plaintext") + "/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "generate", - OperationSuffix: "data-key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_decrypt.go b/builtin/logical/transit/path_decrypt.go index 116732b7f45733..c720b41c4731ca 100644 --- a/builtin/logical/transit/path_decrypt.go +++ b/builtin/logical/transit/path_decrypt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -32,12 +29,6 @@ type DecryptBatchResponseItem struct { func (b *backend) pathDecrypt() *framework.Path { return &framework.Path{ Pattern: "decrypt/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "decrypt", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_decrypt_bench_test.go b/builtin/logical/transit/path_decrypt_bench_test.go index c4dc72837ab679..67d4bc3b5d5a24 100644 --- a/builtin/logical/transit/path_decrypt_bench_test.go +++ b/builtin/logical/transit/path_decrypt_bench_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_decrypt_test.go b/builtin/logical/transit/path_decrypt_test.go index e69402c7d514f0..928439dd35de44 100644 --- a/builtin/logical/transit/path_decrypt_test.go +++ b/builtin/logical/transit/path_decrypt_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_encrypt.go b/builtin/logical/transit/path_encrypt.go index 78b43a64c91ca2..3e074da63d90e7 100644 --- a/builtin/logical/transit/path_encrypt.go +++ b/builtin/logical/transit/path_encrypt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -89,12 +86,6 @@ func (m ManagedKeyFactory) GetManagedKeyParameters() keysutil.ManagedKeyParamete func (b *backend) pathEncrypt() *framework.Path { return &framework.Path{ Pattern: "encrypt/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "encrypt", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_encrypt_bench_test.go b/builtin/logical/transit/path_encrypt_bench_test.go index 8aef39dd4b724e..e648c6e02fc32d 100644 --- a/builtin/logical/transit/path_encrypt_bench_test.go +++ b/builtin/logical/transit/path_encrypt_bench_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_encrypt_test.go b/builtin/logical/transit/path_encrypt_test.go index b886d4fefd8f68..5846ac13b3bd29 100644 --- a/builtin/logical/transit/path_encrypt_test.go +++ b/builtin/logical/transit/path_encrypt_test.go @@ -1,19 +1,14 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( "context" "encoding/json" - "fmt" "reflect" "strings" "testing" "github.com/hashicorp/vault/sdk/helper/keysutil" - uuid "github.com/hashicorp/go-uuid" "github.com/hashicorp/vault/sdk/logical" "github.com/mitchellh/mapstructure" ) @@ -946,48 +941,3 @@ func TestShouldWarnAboutNonceUsage(t *testing.T) { } } } - -func TestTransit_EncryptWithRSAPublicKey(t *testing.T) { - generateKeys(t) - b, s := createBackendWithStorage(t) - keyType := "rsa-2048" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get key - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Import key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import public key: %s", err) - } - - req = &logical.Request{ - Operation: logical.CreateOperation, - Path: fmt.Sprintf("encrypt/%s", keyID), - Storage: s, - Data: map[string]interface{}{ - "plaintext": "bXkgc2VjcmV0IGRhdGE=", - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatal(err) - } -} diff --git a/builtin/logical/transit/path_export.go b/builtin/logical/transit/path_export.go index 9239aee94bd4d1..61bb5e595ca66b 100644 --- a/builtin/logical/transit/path_export.go +++ b/builtin/logical/transit/path_export.go @@ -1,12 +1,10 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( "context" "crypto/ecdsa" "crypto/elliptic" + "crypto/rsa" "crypto/x509" "encoding/base64" "encoding/pem" @@ -24,23 +22,15 @@ const ( exportTypeEncryptionKey = "encryption-key" exportTypeSigningKey = "signing-key" exportTypeHMACKey = "hmac-key" - exportTypePublicKey = "public-key" ) func (b *backend) pathExportKeys() *framework.Path { return &framework.Path{ Pattern: "export/" + framework.GenericNameRegex("type") + "/" + framework.GenericNameRegex("name") + framework.OptionalParamRegex("version"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "export", - OperationSuffix: "key|key-version", - }, - Fields: map[string]*framework.FieldSchema{ "type": { Type: framework.TypeString, - Description: "Type of key to export (encryption-key, signing-key, hmac-key, public-key)", + Description: "Type of key to export (encryption-key, signing-key, hmac-key)", }, "name": { Type: framework.TypeString, @@ -70,7 +60,6 @@ func (b *backend) pathPolicyExportRead(ctx context.Context, req *logical.Request case exportTypeEncryptionKey: case exportTypeSigningKey: case exportTypeHMACKey: - case exportTypePublicKey: default: return logical.ErrorResponse(fmt.Sprintf("invalid export type: %s", exportType)), logical.ErrInvalidRequest } @@ -90,8 +79,8 @@ func (b *backend) pathPolicyExportRead(ctx context.Context, req *logical.Request } defer p.Unlock() - if !p.Exportable && exportType != exportTypePublicKey { - return logical.ErrorResponse("private key material is not exportable"), nil + if !p.Exportable { + return logical.ErrorResponse("key is not exportable"), nil } switch exportType { @@ -174,11 +163,7 @@ func getExportKey(policy *keysutil.Policy, key *keysutil.KeyEntry, exportType st return 
strings.TrimSpace(base64.StdEncoding.EncodeToString(key.Key)), nil case keysutil.KeyType_RSA2048, keysutil.KeyType_RSA3072, keysutil.KeyType_RSA4096: - rsaKey, err := encodeRSAPrivateKey(key) - if err != nil { - return "", err - } - return rsaKey, nil + return encodeRSAPrivateKey(key.RSAKey), nil } case exportTypeSigningKey: @@ -200,92 +185,26 @@ func getExportKey(policy *keysutil.Policy, key *keysutil.KeyEntry, exportType st return ecKey, nil case keysutil.KeyType_ED25519: - if len(key.Key) == 0 { - return "", nil - } - return strings.TrimSpace(base64.StdEncoding.EncodeToString(key.Key)), nil case keysutil.KeyType_RSA2048, keysutil.KeyType_RSA3072, keysutil.KeyType_RSA4096: - rsaKey, err := encodeRSAPrivateKey(key) - if err != nil { - return "", err - } - return rsaKey, nil - } - case exportTypePublicKey: - switch policy.Type { - case keysutil.KeyType_ECDSA_P256, keysutil.KeyType_ECDSA_P384, keysutil.KeyType_ECDSA_P521: - var curve elliptic.Curve - switch policy.Type { - case keysutil.KeyType_ECDSA_P384: - curve = elliptic.P384() - case keysutil.KeyType_ECDSA_P521: - curve = elliptic.P521() - default: - curve = elliptic.P256() - } - ecKey, err := keyEntryToECPublicKey(key, curve) - if err != nil { - return "", err - } - return ecKey, nil - - case keysutil.KeyType_ED25519: - return strings.TrimSpace(key.FormattedPublicKey), nil - - case keysutil.KeyType_RSA2048, keysutil.KeyType_RSA3072, keysutil.KeyType_RSA4096: - rsaKey, err := encodeRSAPublicKey(key) - if err != nil { - return "", err - } - return rsaKey, nil + return encodeRSAPrivateKey(key.RSAKey), nil } } - return "", fmt.Errorf("unknown key type %v for export type %v", policy.Type, exportType) + return "", fmt.Errorf("unknown key type %v", policy.Type) } -func encodeRSAPrivateKey(key *keysutil.KeyEntry) (string, error) { - if key == nil { - return "", errors.New("nil KeyEntry provided") - } - - if key.IsPrivateKeyMissing() { - return "", nil - } - +func encodeRSAPrivateKey(key *rsa.PrivateKey) string { // When encoding PKCS1, the PEM header should be `RSA PRIVATE KEY`. When Go // has PKCS8 encoding support, we may want to change this. 
- blockType := "RSA PRIVATE KEY" - derBytes := x509.MarshalPKCS1PrivateKey(key.RSAKey) - pemBlock := pem.Block{ - Type: blockType, + derBytes := x509.MarshalPKCS1PrivateKey(key) + pemBlock := &pem.Block{ + Type: "RSA PRIVATE KEY", Bytes: derBytes, } - - pemBytes := pem.EncodeToMemory(&pemBlock) - return string(pemBytes), nil -} - -func encodeRSAPublicKey(key *keysutil.KeyEntry) (string, error) { - if key == nil { - return "", errors.New("nil KeyEntry provided") - } - - blockType := "RSA PUBLIC KEY" - derBytes, err := x509.MarshalPKIXPublicKey(key.RSAPublicKey) - if err != nil { - return "", err - } - - pemBlock := pem.Block{ - Type: blockType, - Bytes: derBytes, - } - - pemBytes := pem.EncodeToMemory(&pemBlock) - return string(pemBytes), nil + pemBytes := pem.EncodeToMemory(pemBlock) + return string(pemBytes) } func keyEntryToECPrivateKey(k *keysutil.KeyEntry, curve elliptic.Curve) (string, error) { @@ -293,57 +212,27 @@ func keyEntryToECPrivateKey(k *keysutil.KeyEntry, curve elliptic.Curve) (string, return "", errors.New("nil KeyEntry provided") } - if k.IsPrivateKeyMissing() { - return "", nil - } - - pubKey := ecdsa.PublicKey{ - Curve: curve, - X: k.EC_X, - Y: k.EC_Y, - } - - blockType := "EC PRIVATE KEY" privKey := &ecdsa.PrivateKey{ - PublicKey: pubKey, - D: k.EC_D, + PublicKey: ecdsa.PublicKey{ + Curve: curve, + X: k.EC_X, + Y: k.EC_Y, + }, + D: k.EC_D, } - derBytes, err := x509.MarshalECPrivateKey(privKey) + ecder, err := x509.MarshalECPrivateKey(privKey) if err != nil { return "", err } - - pemBlock := pem.Block{ - Type: blockType, - Bytes: derBytes, - } - - return strings.TrimSpace(string(pem.EncodeToMemory(&pemBlock))), nil -} - -func keyEntryToECPublicKey(k *keysutil.KeyEntry, curve elliptic.Curve) (string, error) { - if k == nil { - return "", errors.New("nil KeyEntry provided") - } - - pubKey := ecdsa.PublicKey{ - Curve: curve, - X: k.EC_X, - Y: k.EC_Y, + if ecder == nil { + return "", errors.New("no data returned when marshalling to private key") } - blockType := "PUBLIC KEY" - derBytes, err := x509.MarshalPKIXPublicKey(&pubKey) - if err != nil { - return "", err + block := pem.Block{ + Type: "EC PRIVATE KEY", + Bytes: ecder, } - - pemBlock := pem.Block{ - Type: blockType, - Bytes: derBytes, - } - - return strings.TrimSpace(string(pem.EncodeToMemory(&pemBlock))), nil + return strings.TrimSpace(string(pem.EncodeToMemory(&block))), nil } const pathExportHelpSyn = `Export named encryption or signing key` diff --git a/builtin/logical/transit/path_export_test.go b/builtin/logical/transit/path_export_test.go index cd25383f2b0608..b44a4e7e0fe269 100644 --- a/builtin/logical/transit/path_export_test.go +++ b/builtin/logical/transit/path_export_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_hash.go b/builtin/logical/transit/path_hash.go index ecf619a52e4480..51ca37daa23177 100644 --- a/builtin/logical/transit/path_hash.go +++ b/builtin/logical/transit/path_hash.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -21,13 +18,6 @@ import ( func (b *backend) pathHash() *framework.Path { return &framework.Path{ Pattern: "hash" + framework.OptionalParamRegex("urlalgorithm"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "hash", - OperationSuffix: "|with-algorithm", - }, - Fields: map[string]*framework.FieldSchema{ "input": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_hash_test.go b/builtin/logical/transit/path_hash_test.go index 084012dd4bafee..3e5dce95c2999f 100644 --- a/builtin/logical/transit/path_hash_test.go +++ b/builtin/logical/transit/path_hash_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_hmac.go b/builtin/logical/transit/path_hmac.go index 704a1b866cfff4..3fdde6bb0d1328 100644 --- a/builtin/logical/transit/path_hmac.go +++ b/builtin/logical/transit/path_hmac.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -48,13 +45,6 @@ type batchResponseHMACItem struct { func (b *backend) pathHMAC() *framework.Path { return &framework.Path{ Pattern: "hmac/" + framework.GenericNameRegex("name") + framework.OptionalParamRegex("urlalgorithm"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "generate", - OperationSuffix: "hmac|hmac-with-algorithm", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_hmac_test.go b/builtin/logical/transit/path_hmac_test.go index af98dd2ca081a3..204e94ec04f8db 100644 --- a/builtin/logical/transit/path_hmac_test.go +++ b/builtin/logical/transit/path_hmac_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_import.go b/builtin/logical/transit/path_import.go index 45cb4dd108f22d..817cf5fc5ddf23 100644 --- a/builtin/logical/transit/path_import.go +++ b/builtin/logical/transit/path_import.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -28,13 +25,6 @@ const EncryptedKeyBytes = 512 func (b *backend) pathImport() *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameRegex("name") + "/import", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "import", - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -59,10 +49,6 @@ ephemeral AES key. Can be one of "SHA1", "SHA224", "SHA256" (default), "SHA384", Description: `The base64-encoded ciphertext of the keys. The AES key should be encrypted using OAEP with the wrapping key and then concatenated with the import key, wrapped by the AES key.`, }, - "public_key": { - Type: framework.TypeString, - Description: `The plaintext PEM public key to be imported. 
If "ciphertext" is set, this field is ignored.`, - }, "allow_rotation": { Type: framework.TypeBool, Description: "True if the imported key may be rotated within Vault; false otherwise.", @@ -115,13 +101,6 @@ key.`, func (b *backend) pathImportVersion() *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameRegex("name") + "/import_version", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "import", - OperationSuffix: "key-version", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -132,21 +111,12 @@ func (b *backend) pathImportVersion() *framework.Path { Description: `The base64-encoded ciphertext of the keys. The AES key should be encrypted using OAEP with the wrapping key and then concatenated with the import key, wrapped by the AES key.`, }, - "public_key": { - Type: framework.TypeString, - Description: `The plaintext public key to be imported. If "ciphertext" is set, this field is ignored.`, - }, "hash_function": { Type: framework.TypeString, Default: "SHA256", Description: `The hash function used as a random oracle in the OAEP wrapping of the user-generated, ephemeral AES key. Can be one of "SHA1", "SHA224", "SHA256" (default), "SHA384", or "SHA512"`, }, - "version": { - Type: framework.TypeInt, - Description: `Key version to be updated, if left empty, a new version will be created unless -a private key is specified and the 'Latest' key is missing a private key.`, - }, }, Callbacks: map[logical.Operation]framework.OperationFunc{ logical.UpdateOperation: b.pathImportVersionWrite, @@ -160,9 +130,11 @@ func (b *backend) pathImportWrite(ctx context.Context, req *logical.Request, d * name := d.Get("name").(string) derived := d.Get("derived").(bool) keyType := d.Get("type").(string) + hashFnStr := d.Get("hash_function").(string) exportable := d.Get("exportable").(bool) allowPlaintextBackup := d.Get("allow_plaintext_backup").(bool) autoRotatePeriod := time.Second * time.Duration(d.Get("auto_rotate_period").(int)) + ciphertextString := d.Get("ciphertext").(string) allowRotation := d.Get("allow_rotation").(bool) // Ensure the caller didn't supply "convergent_encryption" as a field, since it's not supported on import. 
@@ -174,12 +146,6 @@ func (b *backend) pathImportWrite(ctx context.Context, req *logical.Request, d * return nil, errors.New("allow_rotation must be set to true if auto-rotation is enabled") } - // Ensure that at least on `key` field has been set - isCiphertextSet, err := checkKeyFieldsSet(d) - if err != nil { - return nil, err - } - polReq := keysutil.PolicyRequest{ Storage: req.Storage, Name: name, @@ -188,7 +154,6 @@ func (b *backend) pathImportWrite(ctx context.Context, req *logical.Request, d * AllowPlaintextBackup: allowPlaintextBackup, AutoRotatePeriod: autoRotatePeriod, AllowImportedKeyRotation: allowRotation, - IsPrivateKey: isCiphertextSet, } switch strings.ToLower(keyType) { @@ -218,6 +183,11 @@ func (b *backend) pathImportWrite(ctx context.Context, req *logical.Request, d * return logical.ErrorResponse(fmt.Sprintf("unknown key type: %v", keyType)), logical.ErrInvalidRequest } + hashFn, err := parseHashFn(hashFnStr) + if err != nil { + return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest + } + p, _, err := b.GetPolicy(ctx, polReq, b.GetRandomReader()) if err != nil { return nil, err @@ -230,9 +200,14 @@ func (b *backend) pathImportWrite(ctx context.Context, req *logical.Request, d * return nil, errors.New("the import path cannot be used with an existing key; use import-version to rotate an existing imported key") } - key, resp, err := b.extractKeyFromFields(ctx, req, d, polReq.KeyType, isCiphertextSet) + ciphertext, err := base64.StdEncoding.DecodeString(ciphertextString) if err != nil { - return resp, err + return nil, err + } + + key, err := b.decryptImportedKey(ctx, req.Storage, ciphertext, hashFn) + if err != nil { + return nil, err } err = b.lm.ImportPolicy(ctx, polReq, key, b.GetRandomReader()) @@ -245,18 +220,20 @@ func (b *backend) pathImportWrite(ctx context.Context, req *logical.Request, d * func (b *backend) pathImportVersionWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) { name := d.Get("name").(string) + hashFnStr := d.Get("hash_function").(string) + ciphertextString := d.Get("ciphertext").(string) - isCiphertextSet, err := checkKeyFieldsSet(d) - if err != nil { - return nil, err + polReq := keysutil.PolicyRequest{ + Storage: req.Storage, + Name: name, + Upsert: false, } - polReq := keysutil.PolicyRequest{ - Storage: req.Storage, - Name: name, - Upsert: false, - IsPrivateKey: isCiphertextSet, + hashFn, err := parseHashFn(hashFnStr) + if err != nil { + return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest } + p, _, err := b.GetPolicy(ctx, polReq, b.GetRandomReader()) if err != nil { return nil, err @@ -276,24 +253,15 @@ func (b *backend) pathImportVersionWrite(ctx context.Context, req *logical.Reque } defer p.Unlock() - key, resp, err := b.extractKeyFromFields(ctx, req, d, p.Type, isCiphertextSet) + ciphertext, err := base64.StdEncoding.DecodeString(ciphertextString) if err != nil { - return resp, err + return nil, err } - - // Get param version if set else import a new version. 
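The ciphertext field description above spells out the BYOK wrapping format: an ephemeral AES key is encrypted to the transit wrapping key with RSA-OAEP, then concatenated with the target key wrapped by that AES key. A minimal client-side sketch of that flow follows; the Tink KWP helper, the "transit/" mount path, the empty OAEP label, and the 32-byte ephemeral key size are assumptions for illustration, not part of this change.

```go
package transitimport

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"encoding/base64"

	"github.com/google/tink/go/kwp/subtle"
	"github.com/hashicorp/vault/api"
)

// wrapAndImport wraps targetKeyDER (a DER-encoded private key) following the
// ciphertext format described above: an ephemeral AES key is RSA-OAEP
// encrypted to the transit wrapping key, the target key is wrapped with that
// AES key, and the two blobs are concatenated, base64 encoded, and written to
// keys/<name>/import.
func wrapAndImport(client *api.Client, wrappingKey *rsa.PublicKey, targetKeyDER []byte, name, keyType string) error {
	ephKey := make([]byte, 32) // assumed ephemeral AES-256 key
	if _, err := rand.Read(ephKey); err != nil {
		return err
	}

	kwp, err := subtle.NewKWP(ephKey) // assumed AES key-wrap-with-padding helper
	if err != nil {
		return err
	}
	wrappedTarget, err := kwp.Wrap(targetKeyDER)
	if err != nil {
		return err
	}

	// OAEP hash must match the hash_function parameter sent below.
	wrappedEph, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, wrappingKey, ephKey, nil)
	if err != nil {
		return err
	}

	ciphertext := base64.StdEncoding.EncodeToString(append(wrappedEph, wrappedTarget...))

	_, err = client.Logical().Write("transit/keys/"+name+"/import", map[string]interface{}{
		"ciphertext":    ciphertext,
		"type":          keyType,
		"hash_function": "SHA256",
	})
	return err
}
```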
- if version, ok := d.GetOk("version"); ok { - versionToUpdate := version.(int) - - // Check if given version can be updated given input - err = p.KeyVersionCanBeUpdated(versionToUpdate, isCiphertextSet) - if err == nil { - err = p.ImportPrivateKeyForVersion(ctx, req.Storage, versionToUpdate, key) - } - } else { - err = p.ImportPublicOrPrivate(ctx, req.Storage, key, isCiphertextSet, b.GetRandomReader()) + importKey, err := b.decryptImportedKey(ctx, req.Storage, ciphertext, hashFn) + if err != nil { + return nil, err } - + err = p.Import(ctx, req.Storage, importKey, b.GetRandomReader()) if err != nil { return nil, err } @@ -351,36 +319,6 @@ func (b *backend) decryptImportedKey(ctx context.Context, storage logical.Storag return importKey, nil } -func (b *backend) extractKeyFromFields(ctx context.Context, req *logical.Request, d *framework.FieldData, keyType keysutil.KeyType, isPrivateKey bool) ([]byte, *logical.Response, error) { - var key []byte - if isPrivateKey { - hashFnStr := d.Get("hash_function").(string) - hashFn, err := parseHashFn(hashFnStr) - if err != nil { - return key, logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest - } - - ciphertextString := d.Get("ciphertext").(string) - ciphertext, err := base64.StdEncoding.DecodeString(ciphertextString) - if err != nil { - return key, nil, err - } - - key, err = b.decryptImportedKey(ctx, req.Storage, ciphertext, hashFn) - if err != nil { - return key, nil, err - } - } else { - publicKeyString := d.Get("public_key").(string) - if !keyType.ImportPublicKeySupported() { - return key, nil, errors.New("provided type does not support public_key import") - } - key = []byte(publicKeyString) - } - - return key, nil, nil -} - func parseHashFn(hashFn string) (hash.Hash, error) { switch strings.ToUpper(hashFn) { case "SHA1": @@ -398,29 +336,6 @@ func parseHashFn(hashFn string) (hash.Hash, error) { } } -// checkKeyFieldsSet: Checks which key fields are set. If both are set, an error is returned -func checkKeyFieldsSet(d *framework.FieldData) (bool, error) { - ciphertextSet := isFieldSet("ciphertext", d) - publicKeySet := isFieldSet("publicKey", d) - - if ciphertextSet && publicKeySet { - return false, errors.New("only one of the following fields, ciphertext and public_key, can be set") - } else if ciphertextSet { - return true, nil - } else { - return false, nil - } -} - -func isFieldSet(fieldName string, d *framework.FieldData) bool { - _, fieldSet := d.Raw[fieldName] - if !fieldSet { - return false - } - - return true -} - const ( pathImportWriteSyn = "Imports an externally-generated key into a new transit key" pathImportWriteDesc = "This path is used to import an externally-generated " + diff --git a/builtin/logical/transit/path_import_test.go b/builtin/logical/transit/path_import_test.go index cb59e8de27d7ae..d31b12b454e4c7 100644 --- a/builtin/logical/transit/path_import_test.go +++ b/builtin/logical/transit/path_import_test.go @@ -1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( "context" - "crypto" "crypto/ecdsa" "crypto/ed25519" "crypto/elliptic" @@ -13,7 +9,6 @@ import ( "crypto/rsa" "crypto/x509" "encoding/base64" - "encoding/pem" "fmt" "strconv" "sync" @@ -51,10 +46,7 @@ var ( keys = map[string]interface{}{} ) -const ( - nssFormattedEd25519Key = "MGcCAQAwFAYHKoZIzj0CAQYJKwYBBAHaRw8BBEwwSgIBAQQgfJm5R+LK4FMwGzOpemTBXksimEVOVCE8QeC+XBBfNU+hIwMhADaif7IhYx46IHcRTy1z8LeyhABep+UB8Da6olMZGx0i" - rsaPSSFormattedKey = "MIIEvAIBADALBgkqhkiG9w0BAQoEggSoMIIEpAIBAAKCAQEAiFXSBaicB534+2qMZTVzQHMjuhb4NM9hi5H4EAFiYHEBuvm2BAk58NdBK3wiMq/p7Ewu5NQI0gJ7GlcV1MBU94U6MEmWNd0ztmlz37esEDuaCDhmLEBHKRzs8Om0bY9vczcNwcnRIYusP2KMxon3Gv2C86M2Jahig70AIq0E9C7esfrlYxFnoxUfO09XyYfiHlZY59+/dhyULp/RDIvaQ0/DqSSnYmXw8vRQ1gp6DqIzxx3j8ikUrpE7MK6348keFQj1eb83Z5w8qgIdceHHH4wbIAW7qWCPJ/vIJp8Pe1NEanlef61pDut2YcljvN79ccjX/QyqwqYv6xX2uzSlpQIDAQABAoIBACtpBCAoIVJtkv9e3EhHniR55PjWYn7SP5GEz3MtNalWokHqS/H6DBhrOcWCV5NDHx1N3qqe9xYDkzX+X6Wn/gX4RmBkte79uX8OEca8wY1DpRaT+riBWQc2vh0xlPFDuC177KX1QGFJi3V9SCzZdjSCXyV7pPyVopSm4/mmlMq5ANfN8bcHAtcArP7vPzEdckJqurjwHyzsUZJa9sk3OL3rBkKy5bmoPebE1ZQ7C+9eA4u9MKSy95WpTiqMe3rRhvr6zj4bzEvzS9M4r2EdwgAn4FyDwtGdOqtfbtSLTikb73f4MSINnWbt3YPBfRC4PGjWXIN2sMG5XYC3KH+RKbsCgYEAu0HOFInH8OtWiUY0aqRKZuo7lrBczNa5gnce3ZYnNkfrPlu1Xp0SjUkEWukznBLO0N9lvG9j3ksUDTQlPoKarJb9uf/1H0tYHhHm6mP8mH87yfVn2bLb3VPeIQYb+MXnDrwNVCAtxhuHlpnXJPldeuVKeRigHUNIEs76UMiiLqMCgYEAumJxm5NrKk0LXUQmeZolLh0lM/shg8zW7Vi3Ksz5Pe4Pcmg+hTbHjZuJwK6HesljEA0JDNkS0+5hkqiS5UDnj94XfDbi08/kKbPYA12GPVSRNTJxL8q70rFnEUZuMBeL0SKMPhEfR2z5TDDZUBoO6HBUUwgJAij1EsXrBAb0BxcCgYBKS3eKKohLi/PPjy0oynpCjtiJlvuawe7kVoLGg9aW8L3jBdvV6Bf+OmQh9bhmSggIUzo4IzHKdptECdZlEMhxhY6xh14nxmr1s0Cc6oLDtmdwX4+OjioxjB7rl1Ltxwc/j1jycbn3ieCn3e3AW7e9FNARb7XHJnSoEbq65n+CZQKBgQChLPozYAL/HIrkR0fCRmM6gmemkNeFo0CFFP+oWoJ6ZIAlHjJafmmIcmVoI0TzEG3C9pLJ8nmOnYjxCyekakEUryi9+LSkGBWlXmlBV8H7DUNYrlskyfssEs8fKDmnCuWUn3yJO8NBv+HBWkjCNRaJOIIjH0KzBHoRludJnz2tVwKBgQCsQF5lvcXefNfQojbhF+9NfyhvAc7EsMTXQhP9HEj0wVqTuuqyGyu8meXEkcQPRl6yD/yZKuMREDNNck4KV2fdGekBsh8zBgpxdHQ2DcbfxZfNgv3yoX3f0grb/ApQNJb3DVW9FVRigue8XPzFOFX/demJmkUnTg3zGFnXLXjgxg==" -) +const nssFormattedEd25519Key = "MGcCAQAwFAYHKoZIzj0CAQYJKwYBBAHaRw8BBEwwSgIBAQQgfJm5R+LK4FMwGzOpemTBXksimEVOVCE8QeC+XBBfNU+hIwMhADaif7IhYx46IHcRTy1z8LeyhABep+UB8Da6olMZGx0i" func generateKeys(t *testing.T) { t.Helper() @@ -122,39 +114,6 @@ func TestTransit_ImportNSSEd25519Key(t *testing.T) { } } -func TestTransit_ImportRSAPSS(t *testing.T) { - generateKeys(t) - b, s := createBackendWithStorage(t) - - wrappingKey, err := b.getWrappingKey(context.Background(), s) - if err != nil || wrappingKey == nil { - t.Fatalf("failed to retrieve public wrapping key: %s", err) - } - privWrappingKey := wrappingKey.Keys[strconv.Itoa(wrappingKey.LatestVersion)].RSAKey - pubWrappingKey := &privWrappingKey.PublicKey - - rawPKCS8, err := base64.StdEncoding.DecodeString(rsaPSSFormattedKey) - if err != nil { - t.Fatalf("failed to parse rsa-pss base64: %v", err) - } - - blob := wrapTargetPKCS8ForImport(t, pubWrappingKey, rawPKCS8, "SHA256") - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: "keys/rsa-pss/import", - Data: map[string]interface{}{ - "ciphertext": blob, - "type": "rsa-2048", - }, - } - - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import RSA-PSS private key: %v", err) - } -} - func TestTransit_Import(t *testing.T) { generateKeys(t) b, s := createBackendWithStorage(t) @@ -429,70 +388,6 @@ func TestTransit_Import(t 
*testing.T) { } }, ) - - t.Run( - "import public key ed25519", - func(t *testing.T) { - keyType := "ed25519" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get keys - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Import key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import ed25519 key: %v", err) - } - }) - - t.Run( - "import public key ecdsa", - func(t *testing.T) { - keyType := "ecdsa-p256" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get keys - privateKey := getKey(t, keyType) - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Import key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import public key: %s", err) - } - }) } func TestTransit_ImportVersion(t *testing.T) { @@ -639,313 +534,6 @@ func TestTransit_ImportVersion(t *testing.T) { } }, ) - - t.Run( - "import rsa public key and update version with private counterpart", - func(t *testing.T) { - keyType := "rsa-2048" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get keys - privateKey := getKey(t, keyType) - importBlob := wrapTargetKeyForImport(t, pubWrappingKey, privateKey, keyType, "SHA256") - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Import RSA public key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import public key: %s", err) - } - - // Update version - import RSA private key - req = &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import_version", keyID), - Data: map[string]interface{}{ - "ciphertext": importBlob, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to update key: %s", err) - } - }, - ) -} - -func TestTransit_ImportVersionWithPublicKeys(t *testing.T) { - generateKeys(t) - b, s := createBackendWithStorage(t) - - // Retrieve public wrapping key - wrappingKey, err := b.getWrappingKey(context.Background(), s) - if err != nil || wrappingKey == nil { - t.Fatalf("failed to retrieve public wrapping key: %s", err) - } - privWrappingKey := wrappingKey.Keys[strconv.Itoa(wrappingKey.LatestVersion)].RSAKey - pubWrappingKey := &privWrappingKey.PublicKey - - // Import a public key then import private should give us one key - t.Run( - "import rsa public key and update version with private counterpart", - func(t *testing.T) { - keyType := "ecdsa-p256" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - 
} - - // Get keys - privateKey := getKey(t, keyType) - importBlob := wrapTargetKeyForImport(t, pubWrappingKey, privateKey, keyType, "SHA256") - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Import EC public key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import public key: %s", err) - } - - // Update version - import EC private key - req = &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import_version", keyID), - Data: map[string]interface{}{ - "ciphertext": importBlob, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to update key: %s", err) - } - - // We should have one key on export - req = &logical.Request{ - Storage: s, - Operation: logical.ReadOperation, - Path: fmt.Sprintf("export/public-key/%s", keyID), - } - resp, err := b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to export key: %s", err) - } - - if len(resp.Data["keys"].(map[string]string)) != 1 { - t.Fatalf("expected 1 key but got %v: %v", len(resp.Data["keys"].(map[string]string)), resp) - } - }, - ) - - // Import a private and then public should give us two keys - t.Run( - "import ec private key and then its public counterpart", - func(t *testing.T) { - keyType := "ecdsa-p256" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get keys - privateKey := getKey(t, keyType) - importBlob := wrapTargetKeyForImport(t, pubWrappingKey, privateKey, keyType, "SHA256") - publicKeyBytes, err := getPublicKey(privateKey, keyType) - if err != nil { - t.Fatal(err) - } - - // Import EC private key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "ciphertext": importBlob, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to update key: %s", err) - } - - // Update version - Import EC public key - req = &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import_version", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import public key: %s", err) - } - - // We should have two keys on export - req = &logical.Request{ - Storage: s, - Operation: logical.ReadOperation, - Path: fmt.Sprintf("export/public-key/%s", keyID), - } - resp, err := b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to export key: %s", err) - } - - if len(resp.Data["keys"].(map[string]string)) != 2 { - t.Fatalf("expected 2 key but got %v: %v", len(resp.Data["keys"].(map[string]string)), resp) - } - }, - ) - - // Import a public and another public should allow us to insert two private key. 
- t.Run( - "import two public keys and two private keys in reverse order", - func(t *testing.T) { - keyType := "ecdsa-p256" - keyID, err := uuid.GenerateUUID() - if err != nil { - t.Fatalf("failed to generate key ID: %s", err) - } - - // Get keys - privateKey1 := getKey(t, keyType) - importBlob1 := wrapTargetKeyForImport(t, pubWrappingKey, privateKey1, keyType, "SHA256") - publicKeyBytes1, err := getPublicKey(privateKey1, keyType) - if err != nil { - t.Fatal(err) - } - - privateKey2, err := generateKey(keyType) - if err != nil { - t.Fatal(err) - } - importBlob2 := wrapTargetKeyForImport(t, pubWrappingKey, privateKey2, keyType, "SHA256") - publicKeyBytes2, err := getPublicKey(privateKey2, keyType) - if err != nil { - t.Fatal(err) - } - - // Import EC public key - req := &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes1, - "type": keyType, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to update key: %s", err) - } - - // Update version - Import second EC public key - req = &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import_version", keyID), - Data: map[string]interface{}{ - "public_key": publicKeyBytes2, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import public key: %s", err) - } - - // We should have two keys on export - req = &logical.Request{ - Storage: s, - Operation: logical.ReadOperation, - Path: fmt.Sprintf("export/public-key/%s", keyID), - } - resp, err := b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to export key: %s", err) - } - - if len(resp.Data["keys"].(map[string]string)) != 2 { - t.Fatalf("expected 2 key but got %v: %v", len(resp.Data["keys"].(map[string]string)), resp) - } - - // Import second private key first, with no options. 
- req = &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import_version", keyID), - Data: map[string]interface{}{ - "ciphertext": importBlob2, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import private key: %s", err) - } - - // Import first private key second, with a version - req = &logical.Request{ - Storage: s, - Operation: logical.UpdateOperation, - Path: fmt.Sprintf("keys/%s/import_version", keyID), - Data: map[string]interface{}{ - "ciphertext": importBlob1, - "version": 1, - }, - } - _, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to import private key: %s", err) - } - - // We should still have two keys on export - req = &logical.Request{ - Storage: s, - Operation: logical.ReadOperation, - Path: fmt.Sprintf("export/public-key/%s", keyID), - } - resp, err = b.HandleRequest(context.Background(), req) - if err != nil { - t.Fatalf("failed to export key: %s", err) - } - - if len(resp.Data["keys"].(map[string]string)) != 2 { - t.Fatalf("expected 2 key but got %v: %v", len(resp.Data["keys"].(map[string]string)), resp) - } - }, - ) } func wrapTargetKeyForImport(t *testing.T, wrappingKey *rsa.PublicKey, targetKey interface{}, targetKeyType string, hashFnName string) string { @@ -1036,40 +624,3 @@ func generateKey(keyType string) (interface{}, error) { return nil, fmt.Errorf("failed to generate unsupported key type: %s", keyType) } } - -func getPublicKey(privateKey crypto.PrivateKey, keyType string) ([]byte, error) { - var publicKey crypto.PublicKey - var publicKeyBytes []byte - switch keyType { - case "rsa-2048", "rsa-3072", "rsa-4096": - publicKey = privateKey.(*rsa.PrivateKey).Public() - case "ecdsa-p256", "ecdsa-p384", "ecdsa-p521": - publicKey = privateKey.(*ecdsa.PrivateKey).Public() - case "ed25519": - publicKey = privateKey.(ed25519.PrivateKey).Public() - default: - return publicKeyBytes, fmt.Errorf("failed to get public key from %s key", keyType) - } - - publicKeyBytes, err := publicKeyToBytes(publicKey) - if err != nil { - return publicKeyBytes, err - } - - return publicKeyBytes, nil -} - -func publicKeyToBytes(publicKey crypto.PublicKey) ([]byte, error) { - var publicKeyBytesPem []byte - publicKeyBytes, err := x509.MarshalPKIXPublicKey(publicKey) - if err != nil { - return publicKeyBytesPem, fmt.Errorf("failed to marshal public key: %s", err) - } - - pemBlock := &pem.Block{ - Type: "PUBLIC KEY", - Bytes: publicKeyBytes, - } - - return pem.EncodeToMemory(pemBlock), nil -} diff --git a/builtin/logical/transit/path_keys.go b/builtin/logical/transit/path_keys.go index 285f32d33cdce8..c8e6b6b7b49449 100644 --- a/builtin/logical/transit/path_keys.go +++ b/builtin/logical/transit/path_keys.go @@ -1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( "context" - "crypto" "crypto/elliptic" "crypto/x509" "encoding/base64" @@ -26,11 +22,6 @@ func (b *backend) pathListKeys() *framework.Path { return &framework.Path{ Pattern: "keys/?$", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationSuffix: "keys", - }, - Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ListOperation: b.pathKeysList, }, @@ -43,12 +34,6 @@ func (b *backend) pathListKeys() *framework.Path { func (b *backend) pathKeys() *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -133,25 +118,10 @@ key.`, }, }, - Operations: map[logical.Operation]framework.OperationHandler{ - logical.UpdateOperation: &framework.PathOperation{ - Callback: b.pathPolicyWrite, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "create", - }, - }, - logical.DeleteOperation: &framework.PathOperation{ - Callback: b.pathPolicyDelete, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "delete", - }, - }, - logical.ReadOperation: &framework.PathOperation{ - Callback: b.pathPolicyRead, - DisplayAttrs: &framework.DisplayAttributes{ - OperationVerb: "read", - }, - }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.pathPolicyWrite, + logical.DeleteOperation: b.pathPolicyDelete, + logical.ReadOperation: b.pathPolicyRead, }, HelpSynopsis: pathPolicyHelpSyn, @@ -257,14 +227,12 @@ func (b *backend) pathPolicyWrite(ctx context.Context, req *logical.Request, d * p.Unlock() } - resp, err := b.formatKeyPolicy(p, nil) - if err != nil { - return nil, err - } + resp := &logical.Response{} if !upserted { resp.AddWarning(fmt.Sprintf("key %s already existed", name)) } - return resp, nil + + return nil, nil } // Built-in helper type for returning asymmetric keys @@ -292,19 +260,6 @@ func (b *backend) pathPolicyRead(ctx context.Context, req *logical.Request, d *f } defer p.Unlock() - contextRaw := d.Get("context").(string) - var context []byte - if len(contextRaw) != 0 { - context, err = base64.StdEncoding.DecodeString(contextRaw) - if err != nil { - return logical.ErrorResponse("failed to base64-decode context"), logical.ErrInvalidRequest - } - } - - return b.formatKeyPolicy(p, context) -} - -func (b *backend) formatKeyPolicy(p *keysutil.Policy, context []byte) (*logical.Response, error) { // Return the response resp := &logical.Response{ Data: map[string]interface{}{ @@ -361,6 +316,15 @@ func (b *backend) formatKeyPolicy(p *keysutil.Policy, context []byte) (*logical. } } + contextRaw := d.Get("context").(string) + var context []byte + if len(contextRaw) != 0 { + context, err = base64.StdEncoding.DecodeString(contextRaw) + if err != nil { + return logical.ErrorResponse("failed to base64-decode context"), logical.ErrInvalidRequest + } + } + switch p.Type { case keysutil.KeyType_AES128_GCM96, keysutil.KeyType_AES256_GCM96, keysutil.KeyType_ChaCha20_Poly1305: retKeys := map[string]int64{} @@ -398,7 +362,7 @@ func (b *backend) formatKeyPolicy(p *keysutil.Policy, context []byte) (*logical. 
} derived, err := p.GetKey(context, ver, 32) if err != nil { - return nil, fmt.Errorf("failed to derive key to return public component: %w", err) + return nil, fmt.Errorf("failed to derive key to return public component") } pubKey := ed25519.PrivateKey(derived).Public().(ed25519.PublicKey) key.PublicKey = base64.StdEncoding.EncodeToString(pubKey) @@ -415,15 +379,9 @@ func (b *backend) formatKeyPolicy(p *keysutil.Policy, context []byte) (*logical. key.Name = "rsa-4096" } - var publicKey crypto.PublicKey - publicKey = v.RSAPublicKey - if !v.IsPrivateKeyMissing() { - publicKey = v.RSAKey.Public() - } - // Encode the RSA public key in PEM format to return over the // API - derBytes, err := x509.MarshalPKIXPublicKey(publicKey) + derBytes, err := x509.MarshalPKIXPublicKey(v.RSAKey.Public()) if err != nil { return nil, fmt.Errorf("error marshaling RSA public key: %w", err) } diff --git a/builtin/logical/transit/path_keys_config.go b/builtin/logical/transit/path_keys_config.go index 722d39c1e3b5f2..f2628e4f0309eb 100644 --- a/builtin/logical/transit/path_keys_config.go +++ b/builtin/logical/transit/path_keys_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -16,13 +13,6 @@ import ( func (b *backend) pathKeysConfig() *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameRegex("name") + "/config", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "configure", - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -97,8 +87,6 @@ func (b *backend) pathKeysConfigWrite(ctx context.Context, req *logical.Request, } defer p.Unlock() - var warning string - originalMinDecryptionVersion := p.MinDecryptionVersion originalMinEncryptionVersion := p.MinEncryptionVersion originalDeletionAllowed := p.DeletionAllowed @@ -115,6 +103,8 @@ func (b *backend) pathKeysConfigWrite(ctx context.Context, req *logical.Request, } }() + resp = &logical.Response{} + persistNeeded := false minDecryptionVersionRaw, ok := d.GetOk("min_decryption_version") @@ -127,7 +117,7 @@ func (b *backend) pathKeysConfigWrite(ctx context.Context, req *logical.Request, if minDecryptionVersion == 0 { minDecryptionVersion = 1 - warning = "since Vault 0.3, transit key numbering starts at 1; forcing minimum to 1" + resp.AddWarning("since Vault 0.3, transit key numbering starts at 1; forcing minimum to 1") } if minDecryptionVersion != p.MinDecryptionVersion { @@ -221,14 +211,7 @@ func (b *backend) pathKeysConfigWrite(ctx context.Context, req *logical.Request, } if !persistNeeded { - resp, err := b.formatKeyPolicy(p, nil) - if err != nil { - return nil, err - } - if warning != "" { - resp.AddWarning(warning) - } - return resp, nil + return nil, nil } switch { @@ -238,18 +221,11 @@ func (b *backend) pathKeysConfigWrite(ctx context.Context, req *logical.Request, return logical.ErrorResponse("min decryption version should not be less then min available version"), nil } - if err := p.Persist(ctx, req.Storage); err != nil { - return nil, err + if len(resp.Warnings) == 0 { + return nil, p.Persist(ctx, req.Storage) } - resp, err = b.formatKeyPolicy(p, nil) - if err != nil { - return nil, err - } - if warning != "" { - resp.AddWarning(warning) - } - return resp, nil + return resp, p.Persist(ctx, req.Storage) } const pathKeysConfigHelpSyn = `Configure a named encryption key` diff --git a/builtin/logical/transit/path_keys_config_test.go 
b/builtin/logical/transit/path_keys_config_test.go index 335607c3b0e1d5..f6dee45090dc47 100644 --- a/builtin/logical/transit/path_keys_config_test.go +++ b/builtin/logical/transit/path_keys_config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_keys_test.go b/builtin/logical/transit/path_keys_test.go index 4b3303988eed51..04c1d8da092dea 100644 --- a/builtin/logical/transit/path_keys_test.go +++ b/builtin/logical/transit/path_keys_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit_test import ( diff --git a/builtin/logical/transit/path_random.go b/builtin/logical/transit/path_random.go index 3fc5abef286c32..3b903e0b37fe38 100644 --- a/builtin/logical/transit/path_random.go +++ b/builtin/logical/transit/path_random.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -14,13 +11,6 @@ import ( func (b *backend) pathRandom() *framework.Path { return &framework.Path{ Pattern: "random(/" + framework.GenericNameRegex("source") + ")?" + framework.OptionalParamRegex("urlbytes"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "generate", - OperationSuffix: "random|random-with-source|random-with-bytes|random-with-source-and-bytes", - }, - Fields: map[string]*framework.FieldSchema{ "urlbytes": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_random_test.go b/builtin/logical/transit/path_random_test.go index 35782ec3eadc8b..037a00b55f087e 100644 --- a/builtin/logical/transit/path_random_test.go +++ b/builtin/logical/transit/path_random_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_restore.go b/builtin/logical/transit/path_restore.go index 4df9d69226ed69..fa8c142bbab36d 100644 --- a/builtin/logical/transit/path_restore.go +++ b/builtin/logical/transit/path_restore.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -15,13 +12,6 @@ import ( func (b *backend) pathRestore() *framework.Path { return &framework.Path{ Pattern: "restore" + framework.OptionalParamRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "restore", - OperationSuffix: "key|and-rename-key", - }, - Fields: map[string]*framework.FieldSchema{ "backup": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_restore_test.go b/builtin/logical/transit/path_restore_test.go index 3dcc552d98b8ad..6e13b985ee65d3 100644 --- a/builtin/logical/transit/path_restore_test.go +++ b/builtin/logical/transit/path_restore_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_rewrap.go b/builtin/logical/transit/path_rewrap.go index 35d5c68a388829..2bb4bcbc995aaf 100644 --- a/builtin/logical/transit/path_rewrap.go +++ b/builtin/logical/transit/path_rewrap.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -19,12 +16,6 @@ import ( func (b *backend) pathRewrap() *framework.Path { return &framework.Path{ Pattern: "rewrap/" + framework.GenericNameRegex("name"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "rewrap", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, diff --git a/builtin/logical/transit/path_rewrap_test.go b/builtin/logical/transit/path_rewrap_test.go index 097626c1c28ae3..04281a183752d0 100644 --- a/builtin/logical/transit/path_rewrap_test.go +++ b/builtin/logical/transit/path_rewrap_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_rotate.go b/builtin/logical/transit/path_rotate.go index 0035dcfbb9768f..0a14785871c704 100644 --- a/builtin/logical/transit/path_rotate.go +++ b/builtin/logical/transit/path_rotate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -14,13 +11,6 @@ import ( func (b *backend) pathRotate() *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameRegex("name") + "/rotate", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "rotate", - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -64,7 +54,6 @@ func (b *backend) pathRotateWrite(ctx context.Context, req *logical.Request, d * if !b.System().CachingDisabled() { p.Lock(true) } - defer p.Unlock() if p.Type == keysutil.KeyType_MANAGED_KEY { var keyId string @@ -79,11 +68,8 @@ func (b *backend) pathRotateWrite(ctx context.Context, req *logical.Request, d * err = p.Rotate(ctx, req.Storage, b.GetRandomReader()) } - if err != nil { - return nil, err - } - - return b.formatKeyPolicy(p, nil) + p.Unlock() + return nil, err } const pathRotateHelpSyn = `Rotate named encryption key` diff --git a/builtin/logical/transit/path_sign_verify.go b/builtin/logical/transit/path_sign_verify.go index 98ced3812ab9fe..5ef1dcc026c206 100644 --- a/builtin/logical/transit/path_sign_verify.go +++ b/builtin/logical/transit/path_sign_verify.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -29,12 +26,12 @@ type batchResponseSignItem struct { // request item Signature string `json:"signature,omitempty" mapstructure:"signature"` - // The key version to be used for signing + // The key version to be used for encryption KeyVersion int `json:"key_version" mapstructure:"key_version"` PublicKey []byte `json:"publickey,omitempty" mapstructure:"publickey"` - // Error, if set represents a failure encountered while signing a + // Error, if set represents a failure encountered while encrypting a // corresponding batch request item Error string `json:"error,omitempty" mapstructure:"error"` @@ -58,7 +55,7 @@ type batchResponseVerifyItem struct { // Valid indicates whether signature matches the signature derived from the input string Valid bool `json:"valid" mapstructure:"valid"` - // Error, if set represents a failure encountered while verifying a + // Error, if set represents a failure encountered while encrypting a // corresponding batch request item Error string `json:"error,omitempty" mapstructure:"error"` @@ -78,13 +75,6 @@ const defaultHashAlgorithm = "sha2-256" func (b *backend) pathSign() *framework.Path { return &framework.Path{ Pattern: "sign/" + framework.GenericNameRegex("name") + framework.OptionalParamRegex("urlalgorithm"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "sign", - OperationSuffix: "|with-algorithm", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -187,13 +177,6 @@ preserve the order of the batch input`, func (b *backend) pathVerify() *framework.Path { return &framework.Path{ Pattern: "verify/" + framework.GenericNameRegex("name") + framework.OptionalParamRegex("urlalgorithm"), - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "verify", - OperationSuffix: "|with-algorithm", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -259,7 +242,7 @@ none on signing path.`, "signature_algorithm": { Type: framework.TypeString, - Description: `The signature algorithm to use for signature verification. Currently only applies to RSA key types. + Description: `The signature algorithm to use for signature verification. Currently only applies to RSA key types. Options are 'pss' or 'pkcs1v15'. Defaults to 'pss'`, }, @@ -366,7 +349,7 @@ func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *fr return nil, err } if p == nil { - return logical.ErrorResponse("signing key not found"), logical.ErrInvalidRequest + return logical.ErrorResponse("encryption key not found"), logical.ErrInvalidRequest } if !b.System().CachingDisabled() { p.Lock(false) @@ -619,7 +602,7 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d * return nil, err } if p == nil { - return logical.ErrorResponse("signature verification key not found"), logical.ErrInvalidRequest + return logical.ErrorResponse("encryption key not found"), logical.ErrInvalidRequest } if !b.System().CachingDisabled() { p.Lock(false) diff --git a/builtin/logical/transit/path_sign_verify_test.go b/builtin/logical/transit/path_sign_verify_test.go index 63aef9c80b6f1a..e679a089729caf 100644 --- a/builtin/logical/transit/path_sign_verify_test.go +++ b/builtin/logical/transit/path_sign_verify_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
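For context on the sign path restored above, a minimal sketch of calling it through the Vault API client; the "transit/" mount path, the sha2-256 URL algorithm segment, and the helper name are assumptions for illustration.

```go
package transitsign

import (
	"encoding/base64"
	"fmt"

	"github.com/hashicorp/vault/api"
)

// signInput base64-encodes the input, writes it to the sign endpoint for the
// named key, and returns the signature string from the response.
func signInput(client *api.Client, keyName string, data []byte) (string, error) {
	secret, err := client.Logical().Write("transit/sign/"+keyName+"/sha2-256", map[string]interface{}{
		"input": base64.StdEncoding.EncodeToString(data),
	})
	if err != nil {
		return "", err
	}
	sig, ok := secret.Data["signature"].(string)
	if !ok {
		return "", fmt.Errorf("no signature in response")
	}
	return sig, nil
}
```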
-// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_trim.go b/builtin/logical/transit/path_trim.go index 71f4181db859a7..60d6ef9dda6d8a 100644 --- a/builtin/logical/transit/path_trim.go +++ b/builtin/logical/transit/path_trim.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -14,13 +11,6 @@ import ( func (b *backend) pathTrim() *framework.Path { return &framework.Path{ Pattern: "keys/" + framework.GenericNameRegex("name") + "/trim", - - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationVerb: "trim", - OperationSuffix: "key", - }, - Fields: map[string]*framework.FieldSchema{ "name": { Type: framework.TypeString, @@ -100,7 +90,7 @@ func (b *backend) pathTrimUpdate() framework.OperationFunc { return nil, err } - return b.formatKeyPolicy(p, nil) + return nil, nil } } diff --git a/builtin/logical/transit/path_trim_test.go b/builtin/logical/transit/path_trim_test.go index b63d644cba8ced..db38aad938ad09 100644 --- a/builtin/logical/transit/path_trim_test.go +++ b/builtin/logical/transit/path_trim_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/path_wrapping_key.go b/builtin/logical/transit/path_wrapping_key.go index f27a32ade50452..1a08318db339dc 100644 --- a/builtin/logical/transit/path_wrapping_key.go +++ b/builtin/logical/transit/path_wrapping_key.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( @@ -20,10 +17,6 @@ const WrappingKeyName = "wrapping-key" func (b *backend) pathWrappingKey() *framework.Path { return &framework.Path{ Pattern: "wrapping_key", - DisplayAttrs: &framework.DisplayAttributes{ - OperationPrefix: operationPrefixTransit, - OperationSuffix: "wrapping-key", - }, Callbacks: map[logical.Operation]framework.OperationFunc{ logical.ReadOperation: b.pathWrappingKeyRead, }, diff --git a/builtin/logical/transit/path_wrapping_key_test.go b/builtin/logical/transit/path_wrapping_key_test.go index 468c3f4de7ee26..da90585a4ac990 100644 --- a/builtin/logical/transit/path_wrapping_key_test.go +++ b/builtin/logical/transit/path_wrapping_key_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/logical/transit/stepwise_test.go b/builtin/logical/transit/stepwise_test.go index 2b40cea2132264..6d0d193dad280b 100644 --- a/builtin/logical/transit/stepwise_test.go +++ b/builtin/logical/transit/stepwise_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package transit import ( diff --git a/builtin/plugin/backend.go b/builtin/plugin/backend.go index 04606bcbd2883a..b165a10c1ec110 100644 --- a/builtin/plugin/backend.go +++ b/builtin/plugin/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( @@ -86,15 +83,23 @@ func Backend(ctx context.Context, conf *logical.BackendConfig) (*PluginBackend, runningVersion = versioner.PluginVersion().Version } + external := false + if externaler, ok := raw.(logical.Externaler); ok { + external = externaler.IsExternal() + } + // Cleanup meta plugin backend raw.Cleanup(ctx) // Initialize b.Backend with placeholder backend since plugin // backends will need to be lazy loaded. 
- b.Backend = &framework.Backend{ - PathsSpecial: paths, - BackendType: btype, - RunningVersion: runningVersion, + b.Backend = &placeholderBackend{ + Backend: framework.Backend{ + PathsSpecial: paths, + BackendType: btype, + RunningVersion: runningVersion, + }, + external: external, } b.config = conf @@ -102,6 +107,23 @@ func Backend(ctx context.Context, conf *logical.BackendConfig) (*PluginBackend, return &b, nil } +// placeholderBackend is used a placeholder before a backend is lazy-loaded. +// It is mostly used to mark that the backend is an external backend. +type placeholderBackend struct { + framework.Backend + + external bool +} + +func (p *placeholderBackend) IsExternal() bool { + return p.external +} + +var ( + _ logical.Externaler = (*placeholderBackend)(nil) + _ logical.PluginVersioner = (*placeholderBackend)(nil) +) + // PluginBackend is a thin wrapper around plugin.BackendPluginClient type PluginBackend struct { Backend logical.Backend @@ -301,4 +323,14 @@ func (b *PluginBackend) PluginVersion() logical.PluginVersion { return logical.EmptyPluginVersion } -var _ logical.PluginVersioner = (*PluginBackend)(nil) +func (b *PluginBackend) IsExternal() bool { + if externaler, ok := b.Backend.(logical.Externaler); ok { + return externaler.IsExternal() + } + return false +} + +var ( + _ logical.PluginVersioner = (*PluginBackend)(nil) + _ logical.Externaler = (*PluginBackend)(nil) +) diff --git a/builtin/plugin/backend_lazyLoad_test.go b/builtin/plugin/backend_lazyLoad_test.go index b2f6303ba6ac91..4d2727037adcaa 100644 --- a/builtin/plugin/backend_lazyLoad_test.go +++ b/builtin/plugin/backend_lazyLoad_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/builtin/plugin/backend_test.go b/builtin/plugin/backend_test.go index 28dd1e348331ef..d7a678ba1f6b66 100644 --- a/builtin/plugin/backend_test.go +++ b/builtin/plugin/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin_test import ( diff --git a/builtin/plugin/mock_plugin_test.go b/builtin/plugin/mock_plugin_test.go index 9279c828f1da36..532b7c763286dd 100644 --- a/builtin/plugin/mock_plugin_test.go +++ b/builtin/plugin/mock_plugin_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/builtin/plugin/v5/backend.go b/builtin/plugin/v5/backend.go index eac311b4ad60df..3f7a9a884ce776 100644 --- a/builtin/plugin/v5/backend.go +++ b/builtin/plugin/v5/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
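A minimal sketch of how a caller can use the logical.Externaler behaviour added above, mirroring the pattern in PluginBackend.IsExternal; the package name is an assumption for illustration.

```go
package pluginexample

import (
	"github.com/hashicorp/vault/sdk/logical"
)

// backendIsExternal treats a backend as external only if it implements
// logical.Externaler and reports itself as external.
func backendIsExternal(b logical.Backend) bool {
	if ext, ok := b.(logical.Externaler); ok {
		return ext.IsExternal()
	}
	return false
}
```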
-// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/changelog/17919.txt b/changelog/17919.txt deleted file mode 100644 index 8fbb41db44a3b8..00000000000000 --- a/changelog/17919.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -api: property based testing for LifetimeWatcher sleep duration calculation -``` diff --git a/changelog/17934.txt b/changelog/17934.txt deleted file mode 100644 index 7f087a915a2854..00000000000000 --- a/changelog/17934.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/transit: Add support to import public keys in transit engine and allow encryption and verification of signed data -``` diff --git a/changelog/18186.txt b/changelog/18186.txt deleted file mode 100644 index 13710826284e8d..00000000000000 --- a/changelog/18186.txt +++ /dev/null @@ -1,6 +0,0 @@ -```release-note:breaking-change -secrets/pki: Maintaining running count of certificates will be turned off by default. -To re-enable keeping these metrics available on the tidy status endpoint, enable -maintain_stored_certificate_counts on tidy-config, to also publish them to the -metrics consumer, enable publish_stored_certificate_count_metrics . -``` \ No newline at end of file diff --git a/changelog/18225.txt b/changelog/18225.txt deleted file mode 100644 index 567c3c78da95e1..00000000000000 --- a/changelog/18225.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -auth/ldap: allow providing the LDAP password via an env var when authenticating via the CLI -``` diff --git a/changelog/18230.txt b/changelog/18230.txt deleted file mode 100644 index 335f9670db2aea..00000000000000 --- a/changelog/18230.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -auth/ldap: allow configuration of alias dereferencing in LDAP search -``` diff --git a/changelog/18376.txt b/changelog/18376.txt deleted file mode 100644 index 1edc3df5a1ad1e..00000000000000 --- a/changelog/18376.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: Add openapi response definitions to pki/config_*.go -``` diff --git a/changelog/18465.txt b/changelog/18465.txt deleted file mode 100644 index 928da99bc4fefd..00000000000000 --- a/changelog/18465.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response defintions to /sys/auth endpoints -``` \ No newline at end of file diff --git a/changelog/18468.txt b/changelog/18468.txt deleted file mode 100644 index 362bf05018c5bf..00000000000000 --- a/changelog/18468.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response defintions to /sys/capabilities endpoints -``` \ No newline at end of file diff --git a/changelog/18472.txt b/changelog/18472.txt deleted file mode 100644 index e34d53afc2f685..00000000000000 --- a/changelog/18472.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response defintions to /sys/config and /sys/generate-root endpoints -``` \ No newline at end of file diff --git a/changelog/18515.txt b/changelog/18515.txt deleted file mode 100644 index 86eb71b1916777..00000000000000 --- a/changelog/18515.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: Add openapi response definitions to vault/logical_system_paths.go defined endpoints. 
-``` diff --git a/changelog/18521.txt b/changelog/18521.txt deleted file mode 100644 index 4111aea2c98e49..00000000000000 --- a/changelog/18521.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -ui: wait for wanted message event during OIDC callback instead of using the first message event -``` diff --git a/changelog/18542.txt b/changelog/18542.txt deleted file mode 100644 index ff4674010f4b74..00000000000000 --- a/changelog/18542.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response definitions to /sys/internal endpoints -``` diff --git a/changelog/18624.txt b/changelog/18624.txt deleted file mode 100644 index 91209bb46d9e23..00000000000000 --- a/changelog/18624.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response definitions to /sys/rotate endpoints -``` diff --git a/changelog/18625.txt b/changelog/18625.txt deleted file mode 100644 index 526d6b63e6f6bf..00000000000000 --- a/changelog/18625.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response definitions to /sys/seal endpoints -``` \ No newline at end of file diff --git a/changelog/18626.txt b/changelog/18626.txt deleted file mode 100644 index 6bb2ba0f4d89be..00000000000000 --- a/changelog/18626.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response definitions to /sys/tool endpoints -``` \ No newline at end of file diff --git a/changelog/18627.txt b/changelog/18627.txt deleted file mode 100644 index e2a4dfb5f2e618..00000000000000 --- a/changelog/18627.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response definitions to /sys/wrapping endpoints -``` \ No newline at end of file diff --git a/changelog/18628.txt b/changelog/18628.txt deleted file mode 100644 index 0722856c93b93a..00000000000000 --- a/changelog/18628.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: add openapi response definitions to /sys/version-history, /sys/leader, /sys/ha-status, /sys/host-info, /sys/in-flight-req -``` \ No newline at end of file diff --git a/changelog/18633.txt b/changelog/18633.txt deleted file mode 100644 index 2048c46d914ece..00000000000000 --- a/changelog/18633.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: Add openapi response definitions to /sys defined endpoints. -``` \ No newline at end of file diff --git a/changelog/18729.txt b/changelog/18729.txt deleted file mode 100644 index 975d0274bc6f9b..00000000000000 --- a/changelog/18729.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -sdk/backend: prevent panic when computing the zero value for a `TypeInt64` schema field. -``` \ No newline at end of file diff --git a/changelog/18863.txt b/changelog/18863.txt deleted file mode 100644 index c1f2800c289043..00000000000000 --- a/changelog/18863.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -agent: JWT auto-auth has a new config option, `remove_jwt_follows_symlinks` (default: false), that, if set to true will now remove the JWT, instead of the symlink to the JWT, if a symlink to a JWT has been provided in the `path` option, and the `remove_jwt_after_reading` config option is set to true (default). 
-``` \ No newline at end of file diff --git a/changelog/18870.txt b/changelog/18870.txt deleted file mode 100644 index 1b694895fec619..00000000000000 --- a/changelog/18870.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core:provide more descriptive error message when calling enterprise feature paths in open-source -``` \ No newline at end of file diff --git a/changelog/19002.txt b/changelog/19002.txt deleted file mode 100644 index d1a1ff5371ab46..00000000000000 --- a/changelog/19002.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -agent: Added `reload` option to cert auth configuration in case of external renewals of local x509 key-pairs. -``` \ No newline at end of file diff --git a/changelog/19043.txt b/changelog/19043.txt deleted file mode 100644 index 20a1a77bb7890b..00000000000000 --- a/changelog/19043.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: added ability to validate response structures against openapi schema for test clusters -``` \ No newline at end of file diff --git a/changelog/19103.txt b/changelog/19103.txt deleted file mode 100644 index 868db6226f9432..00000000000000 --- a/changelog/19103.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/database: Adds error message requiring password on root crednetial rotation. -``` \ No newline at end of file diff --git a/changelog/19135.txt b/changelog/19135.txt deleted file mode 100644 index a3e085b5a5805c..00000000000000 --- a/changelog/19135.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -ui (enterprise): Fix cancel button from transform engine role creation page -``` diff --git a/changelog/19139.txt b/changelog/19139.txt deleted file mode 100644 index 75e9a7847e17fa..00000000000000 --- a/changelog/19139.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -ui: fixes bug in kmip role form that caused `operation_all` to persist after deselecting all operation checkboxes -``` diff --git a/changelog/19160.txt b/changelog/19160.txt deleted file mode 100644 index 66a3baa1575833..00000000000000 --- a/changelog/19160.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -api: Addressed a couple of issues that arose as edge cases for the -output-policy flag. Specifically around properly handling list commands, distinguishing kv V1/V2, and correctly recognizing protected paths. 
-``` \ No newline at end of file diff --git a/changelog/19170.txt b/changelog/19170.txt deleted file mode 100644 index 9a421dd183a2dd..00000000000000 --- a/changelog/19170.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -website/docs: fix database static-user sample payload -``` diff --git a/changelog/19187.txt b/changelog/19187.txt deleted file mode 100644 index c04234a1bb9bac..00000000000000 --- a/changelog/19187.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -website/docs: Add rotate root documentation for azure secrets engine -``` diff --git a/changelog/19215.txt b/changelog/19215.txt deleted file mode 100644 index 33fea94666bd15..00000000000000 --- a/changelog/19215.txt +++ /dev/null @@ -1,5 +0,0 @@ -```release-note:feature -**Secrets/Auth Plugin Multiplexing**: The plugin will be multiplexed when run -as an external plugin by vault versions that support secrets/auth plugin -multiplexing (> 1.12) -``` diff --git a/changelog/19230.txt b/changelog/19230.txt new file mode 100644 index 00000000000000..ab2853d45edbee --- /dev/null +++ b/changelog/19230.txt @@ -0,0 +1,4 @@ +```release-note:feature +**User Lockout**: Adds support to configure the user-lockout behaviour for failed logins to prevent +brute force attacks for userpass, approle and ldap auth methods. +``` \ No newline at end of file diff --git a/changelog/19247.txt b/changelog/19247.txt deleted file mode 100644 index f51e8479c97f1f..00000000000000 --- a/changelog/19247.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -auth/oidc: Adds support for group membership parsing when using IBM ISAM as an OIDC provider. -``` diff --git a/changelog/19252.txt b/changelog/19252.txt deleted file mode 100644 index 99121351d98ca4..00000000000000 --- a/changelog/19252.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: Consistently stop Vault server on exit in gen_openapi.sh -``` diff --git a/changelog/19260.txt b/changelog/19260.txt deleted file mode 100644 index 77138a38607cbe..00000000000000 --- a/changelog/19260.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**agent/auto-auth:**: Add OCI (Oracle Cloud Infrastructure) auto-auth method -``` diff --git a/changelog/19296.txt b/changelog/19296.txt deleted file mode 100644 index 1ef62a0cde2e65..00000000000000 --- a/changelog/19296.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**Sidebar Navigation in UI**: A new sidebar navigation panel has been added in the UI to replace the top navigation bar. 
-``` \ No newline at end of file diff --git a/changelog/19319.txt b/changelog/19319.txt deleted file mode 100644 index 4702344afb081d..00000000000000 --- a/changelog/19319.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -openapi: Improve operationId/request/response naming strategy -``` diff --git a/changelog/19365.txt b/changelog/19365.txt deleted file mode 100644 index 774c750f495133..00000000000000 --- a/changelog/19365.txt +++ /dev/null @@ -1,7 +0,0 @@ -```release-note: enhancement -auth/aws: Support request cancellation with AWS requests -``` - -```release-note: enhancement -secrets/aws: Support request cancellation with AWS requests -``` diff --git a/changelog/19378.txt b/changelog/19378.txt deleted file mode 100644 index 40a1e82fcb6425..00000000000000 --- a/changelog/19378.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -cli/kv: add -mount flag to kv list -``` diff --git a/changelog/19416.txt b/changelog/19416.txt deleted file mode 100644 index f2a7d3275b64ac..00000000000000 --- a/changelog/19416.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -auth/token: Fix cubbyhole and revocation for legacy service tokens -``` diff --git a/changelog/19468.txt b/changelog/19468.txt deleted file mode 100644 index 5afce90eb65f36..00000000000000 --- a/changelog/19468.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -plugin/reload: Fix a possible data race with rollback manager and plugin reload -``` diff --git a/changelog/19495.txt b/changelog/19495.txt deleted file mode 100644 index dac2ca00dfb874..00000000000000 --- a/changelog/19495.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -shamir: change mul and div implementations to be constant-time -``` \ No newline at end of file diff --git a/changelog/19519.txt b/changelog/19519.txt deleted file mode 100644 index 6756f62b2d47ab..00000000000000 --- a/changelog/19519.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/transit: Allow importing RSA-PSS OID (1.2.840.113549.1.1.10) private keys via BYOK. -``` diff --git a/changelog/19520.txt b/changelog/19520.txt deleted file mode 100644 index 726be2c13a60b6..00000000000000 --- a/changelog/19520.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -http: Support responding to HEAD operation from plugins -``` diff --git a/changelog/19616.txt b/changelog/19616.txt deleted file mode 100644 index 3afcc608d19a99..00000000000000 --- a/changelog/19616.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/postgresql: Add configuration to scram-sha-256 encrypt passwords on Vault before sending them to PostgreSQL -``` \ No newline at end of file diff --git a/changelog/19776.txt b/changelog/19776.txt deleted file mode 100644 index 786cfd321673fb..00000000000000 --- a/changelog/19776.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -agent: Vault Agent now reports its name and version as part of the User-Agent header in all requests issued. 
-``` diff --git a/changelog/19791.txt b/changelog/19791.txt deleted file mode 100644 index 26722cde313342..00000000000000 --- a/changelog/19791.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: add allowed_managed_keys field to secret engine mount options -``` diff --git a/changelog/19798.txt b/changelog/19798.txt deleted file mode 100644 index 4bae8b637897b1..00000000000000 --- a/changelog/19798.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/terraform: upgrades dependencies -``` \ No newline at end of file diff --git a/changelog/19814.txt b/changelog/19814.txt deleted file mode 100644 index 687527efca8a8d..00000000000000 --- a/changelog/19814.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -audit: add plugin metadata, including plugin name, type, version, sha256, and whether plugin is external, to audit logging -``` \ No newline at end of file diff --git a/changelog/19829.txt b/changelog/19829.txt deleted file mode 100644 index e8472b2717edb5..00000000000000 --- a/changelog/19829.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/ad: upgrades dependencies -``` \ No newline at end of file diff --git a/changelog/19846.txt b/changelog/19846.txt deleted file mode 100644 index 269b11797b9e83..00000000000000 --- a/changelog/19846.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/alicloud: upgrades dependencies -``` diff --git a/changelog/19861.txt b/changelog/19861.txt deleted file mode 100644 index ee5bc703e9cbe6..00000000000000 --- a/changelog/19861.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/mongodbatlas: upgrades dependencies -``` \ No newline at end of file diff --git a/changelog/19862.txt b/changelog/19862.txt deleted file mode 100644 index c1ce6d8bb7105c..00000000000000 --- a/changelog/19862.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -build: Prefer GOBIN when set over GOPATH/bin when building the binary -``` diff --git a/changelog/19878.txt b/changelog/19878.txt deleted file mode 100644 index 4135434b792383..00000000000000 --- a/changelog/19878.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Remove the Bulma CSS framework. 
-``` \ No newline at end of file diff --git a/changelog/19891.txt b/changelog/19891.txt deleted file mode 100644 index b030151e858b2e..00000000000000 --- a/changelog/19891.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core (enterprise): add configuration for license reporting -``` \ No newline at end of file diff --git a/changelog/19901.txt b/changelog/19901.txt deleted file mode 100644 index 8e0bbbddb5ec69..00000000000000 --- a/changelog/19901.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Updates UI javascript dependencies -``` \ No newline at end of file diff --git a/changelog/19913.txt b/changelog/19913.txt deleted file mode 100644 index eccdec6533ad27..00000000000000 --- a/changelog/19913.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Adds whitespace warning to secrets engine and auth method path inputs -``` \ No newline at end of file diff --git a/changelog/19954.txt b/changelog/19954.txt deleted file mode 100644 index e0ff45f87d22cc..00000000000000 --- a/changelog/19954.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -database/mongodb: upgrade mongo driver to 1.11 -``` diff --git a/changelog/19993.txt b/changelog/19993.txt deleted file mode 100644 index 90650863ab887b..00000000000000 --- a/changelog/19993.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/openldap: upgrades dependencies -``` \ No newline at end of file diff --git a/changelog/20073.txt b/changelog/20073.txt deleted file mode 100644 index 10c21a58ba5227..00000000000000 --- a/changelog/20073.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core/activity: refactor the activity log's generation of precomputed queries -``` \ No newline at end of file diff --git a/changelog/20078.txt b/changelog/20078.txt deleted file mode 100644 index 8749354b315d78..00000000000000 --- a/changelog/20078.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core/activity: error when attempting to update retention configuration below the minimum -``` \ No newline at end of file diff --git a/changelog/20086.txt b/changelog/20086.txt deleted file mode 100644 index 9511c97b66e31f..00000000000000 --- a/changelog/20086.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -api: `/sys/internal/counters/config` endpoint now contains read-only -`reporting_enabled` and `billing_start_timestamp` fields. -``` diff --git a/changelog/20125.txt b/changelog/20125.txt deleted file mode 100644 index 07dd8201dba895..00000000000000 --- a/changelog/20125.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: updates clients configuration edit form state based on census reporting configuration -``` \ No newline at end of file diff --git a/changelog/20150.txt b/changelog/20150.txt deleted file mode 100644 index 0ea8259f9e663d..00000000000000 --- a/changelog/20150.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -api: `/sys/internal/counters/config` endpoint now contains read-only -`minimum_retention_months`. 
-``` diff --git a/changelog/20163.txt b/changelog/20163.txt deleted file mode 100644 index 0b845fbae0db74..00000000000000 --- a/changelog/20163.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: adds warning for commas in stringArray inputs and updates tooltip help text to remove references to comma separation -``` diff --git a/changelog/20224.txt b/changelog/20224.txt deleted file mode 100644 index 7ec5bf612177d4..00000000000000 --- a/changelog/20224.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -command/server: New -dev-cluster-json writes a file describing the dev cluster in -dev and -dev-three-node modes, plus -dev-three-node now enables unauthenticated metrics and pprof requests. -``` diff --git a/changelog/20234.txt b/changelog/20234.txt deleted file mode 100644 index 1f20bdc5a92094..00000000000000 --- a/changelog/20234.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -auth/cert: Better return OCSP validation errors during login to the caller. -``` diff --git a/changelog/20247.txt b/changelog/20247.txt deleted file mode 100644 index 91f2f0d23fcdf4..00000000000000 --- a/changelog/20247.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -sdk: Add new docker-based cluster testing framework to the sdk. -``` diff --git a/changelog/20253.txt b/changelog/20253.txt deleted file mode 100644 index 19edae1bc4f2ee..00000000000000 --- a/changelog/20253.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/pki: Add warning when issuer lacks KeyUsage during CRL rebuilds; expose in logs and on rotation. -``` diff --git a/changelog/20261.txt b/changelog/20261.txt deleted file mode 100644 index 5f4eb977cce1f6..00000000000000 --- a/changelog/20261.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -* physical/etcd: Upgrade etcd3 client to v3.5.7 -``` \ No newline at end of file diff --git a/changelog/20265.txt b/changelog/20265.txt deleted file mode 100644 index 8e27875f627f3d..00000000000000 --- a/changelog/20265.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -* api: Add Config.TLSConfig method to fetch the TLS configuration from a client config. -``` \ No newline at end of file diff --git a/changelog/20276.txt b/changelog/20276.txt deleted file mode 100644 index 71f288ab9a0d66..00000000000000 --- a/changelog/20276.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/pki: Include CA serial number, key UUID on issuers list endpoint. -``` diff --git a/changelog/20285.txt b/changelog/20285.txt deleted file mode 100644 index 2bc2241dfe0ba2..00000000000000 --- a/changelog/20285.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -openapi: Small fixes for OpenAPI display attributes. Changed "log-in" to "login" -``` diff --git a/changelog/20425.txt b/changelog/20425.txt deleted file mode 100644 index 20869fc19f708a..00000000000000 --- a/changelog/20425.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**MongoDB Atlas Database Secrets**: Adds support for client certificate credentials -``` diff --git a/changelog/20430.txt b/changelog/20430.txt deleted file mode 100644 index 5ac95f104cdb55..00000000000000 --- a/changelog/20430.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -ui: Fix secret render when path includes %. Resolves #11616. 
-``` diff --git a/changelog/20431.txt b/changelog/20431.txt deleted file mode 100644 index a0083d879ecd00..00000000000000 --- a/changelog/20431.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Add download button for each secret value in KV v2 -``` diff --git a/changelog/20441.txt b/changelog/20441.txt deleted file mode 100644 index 628784883f8c47..00000000000000 --- a/changelog/20441.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/pki: Allow determining existing issuers and keys on import. -``` diff --git a/changelog/20442.txt b/changelog/20442.txt deleted file mode 100644 index 09636b69b06027..00000000000000 --- a/changelog/20442.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/pki: Add missing fields to tidy-status, include new last_auto_tidy_finished field. -``` diff --git a/changelog/20464.txt b/changelog/20464.txt deleted file mode 100644 index 6b58153fccf615..00000000000000 --- a/changelog/20464.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -cli: Add walkSecretsTree helper function, which recursively walks secrets rooted at the given path -``` diff --git a/changelog/20481.txt b/changelog/20481.txt deleted file mode 100644 index c6f27116311b59..00000000000000 --- a/changelog/20481.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Add filtering by engine type and engine name to the Secret Engine list view. -``` diff --git a/changelog/20488.txt b/changelog/20488.txt deleted file mode 100644 index 5ea0f78b392851..00000000000000 --- a/changelog/20488.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -cli: Improve addPrefixToKVPath helper -``` diff --git a/changelog/20530.txt b/changelog/20530.txt deleted file mode 100644 index 6f6d04bf17e822..00000000000000 --- a/changelog/20530.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**Environment Variables through Vault Agent**: Introducing a new process-supervisor mode for Vault Agent which allows injecting secrets as environment variables into a child process using a new `env_template` configuration stanza. The process-supervisor configuration can be generated with a new `vault agent generate-config` helper tool. -``` diff --git a/changelog/20536.txt b/changelog/20536.txt deleted file mode 100644 index 62aa93605c380d..00000000000000 --- a/changelog/20536.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**AWS Static Roles**: The AWS Secrets Engine can manage static roles configured by users. -``` diff --git a/changelog/20548.txt b/changelog/20548.txt deleted file mode 100644 index fed5d2b4506e26..00000000000000 --- a/changelog/20548.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**Vault Proxy**: Introduced Vault Proxy, a new subcommand of the Vault binary that can be invoked using `vault proxy -config=config.hcl`. It currently has the same feature set as Vault Agent's API proxy, but the two may diverge in the future. We plan to deprecate the API proxy functionality of Vault Agent in a future release. -``` diff --git a/changelog/20559.txt b/changelog/20559.txt deleted file mode 100644 index 2ff6422db0dbeb..00000000000000 --- a/changelog/20559.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core, secrets/pki, audit: Update dependency go-jose to v3 due to v2 deprecation. 
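Changelog 20530, removed above as part of this backport, describes the new process-supervisor mode for Vault Agent, configured through an `env_template` stanza and generated with `vault agent generate-config`. A minimal sketch of such a configuration is shown below; only `env_template` and the generate-config helper are named by the changelog, so the surrounding stanza and field names (`auto_auth`, `exec`, `command`) are assumptions based on typical agent configuration layout.

```hcl
# Sketch only: inject a secret into a child process as an environment variable.
auto_auth {
  method "approle" {
    config = {
      role_id_file_path   = "/etc/vault/role_id"
      secret_id_file_path = "/etc/vault/secret_id"
    }
  }
}

# Render a secret into the child process environment as DB_PASSWORD.
env_template "DB_PASSWORD" {
  contents = "{{ with secret \"secret/data/app\" }}{{ .Data.data.password }}{{ end }}"
}

# Run the supervised process; the agent restarts it when rendered secrets change.
exec {
  command = ["./my-app"]
}
```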
-``` diff --git a/changelog/20569.txt b/changelog/20569.txt deleted file mode 100644 index e10a4643ea7fdf..00000000000000 --- a/changelog/20569.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -agent: Add logic to validate env_template entries in configuration -``` diff --git a/changelog/20590.txt b/changelog/20590.txt deleted file mode 100644 index c1c7c9e2b52656..00000000000000 --- a/changelog/20590.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Update Web CLI with examples and a new `kv-get` command for reading kv v2 data and metadata -``` diff --git a/changelog/20626.txt b/changelog/20626.txt deleted file mode 100644 index 2a13cee1735da1..00000000000000 --- a/changelog/20626.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -activitylog: EntityRecord protobufs now contain a ClientType field for -distinguishing client sources. -``` diff --git a/changelog/20628.txt b/changelog/20628.txt deleted file mode 100644 index 978814601a30eb..00000000000000 --- a/changelog/20628.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -agent: initial implementation of a process runner for injecting secrets via environment variables via vault agent -``` \ No newline at end of file diff --git a/changelog/20629.txt b/changelog/20629.txt deleted file mode 100644 index f5692f7691e084..00000000000000 --- a/changelog/20629.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -command/server (enterprise): -dev-three-node now creates perf standbys instead of regular standbys. -``` \ No newline at end of file diff --git a/changelog/20652.txt b/changelog/20652.txt deleted file mode 100644 index c41e750c04721d..00000000000000 --- a/changelog/20652.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/transit: Respond to writes with updated key policy, cache configuration. -``` diff --git a/changelog/20654.txt b/changelog/20654.txt deleted file mode 100644 index 91e567477b5b98..00000000000000 --- a/changelog/20654.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/pki: Warning when issuing leafs from CSRs with basic constraints. In the future, issuance of non-CA leaf certs from CSRs with asserted IsCA Basic Constraints will be prohibited. -``` diff --git a/changelog/20664.txt b/changelog/20664.txt deleted file mode 100644 index 6f2b4abe61ae36..00000000000000 --- a/changelog/20664.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -secrets/pki: Support setting both maintain_stored_certificate_counts=false and publish_stored_certificate_count_metrics=false explicitly in tidy config. -``` diff --git a/changelog/20680.txt b/changelog/20680.txt deleted file mode 100644 index ff80ac4660924e..00000000000000 --- a/changelog/20680.txt +++ /dev/null @@ -1,6 +0,0 @@ -```release-note:improvement -core (enterprise): support reloading configuration for automated reporting via SIGHUP -``` -```release-note:improvement -core (enterprise): license updates trigger a reload of reporting and the activity log -``` \ No newline at end of file diff --git a/changelog/20694.txt b/changelog/20694.txt deleted file mode 100644 index 07f790a666ddf6..00000000000000 --- a/changelog/20694.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -api: GET ... 
/sys/internal/counters/activity?current_billing_period=true now -results in a response which contains the full billing period -``` diff --git a/changelog/20697.txt b/changelog/20697.txt deleted file mode 100644 index be80443714dae6..00000000000000 --- a/changelog/20697.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: update detail views that render ttl durations to display full unit instead of letter (i.e. 'days' instead of 'd') -``` diff --git a/changelog/20725.txt b/changelog/20725.txt deleted file mode 100644 index 04399cca8f63ef..00000000000000 --- a/changelog/20725.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -auth/gcp: Updated plugin from v0.15.0 to v0.16.0 -``` diff --git a/changelog/20731.txt b/changelog/20731.txt deleted file mode 100644 index 1896c199add9c9..00000000000000 --- a/changelog/20731.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -ui: fixes auto_rotate_period ttl input for transit keys -``` diff --git a/changelog/20736.txt b/changelog/20736.txt deleted file mode 100644 index 1c4c3d4d256e05..00000000000000 --- a/changelog/20736.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/transit: Support BYOK-encrypted export of keys to securely allow synchronizing specific keys and version across clusters. -``` diff --git a/changelog/20741.txt b/changelog/20741.txt deleted file mode 100644 index 8034e456e0c67b..00000000000000 --- a/changelog/20741.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -agent: Add integration tests for agent running in process supervisor mode -``` diff --git a/changelog/20742.txt b/changelog/20742.txt deleted file mode 100644 index d91237e1d3912f..00000000000000 --- a/changelog/20742.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/mongodbatlas: Updated plugin from v0.9.1 to v0.10.0 -``` diff --git a/changelog/20745.txt b/changelog/20745.txt deleted file mode 100644 index 57a4391ba22dee..00000000000000 --- a/changelog/20745.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -auth/centrify: Updated plugin from v0.14.0 to v0.15.1 -``` diff --git a/changelog/20747.txt b/changelog/20747.txt deleted file mode 100644 index 4c600d203fb3a0..00000000000000 --- a/changelog/20747.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -ui: Add filtering by auth type and auth name to the Authentication Method list view. -``` diff --git a/changelog/20750.txt b/changelog/20750.txt deleted file mode 100644 index 75a3e1da364e9d..00000000000000 --- a/changelog/20750.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/ad: Updated plugin from v0.10.1-0.20230329210417-0b2cdb26cf5d to v0.16.0 -``` \ No newline at end of file diff --git a/changelog/20751.txt b/changelog/20751.txt deleted file mode 100644 index 9b78b3dfe5a29f..00000000000000 --- a/changelog/20751.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -database/redis-elasticache: Updated plugin from v0.2.0 to v0.2.1 -``` diff --git a/changelog/20752.txt b/changelog/20752.txt deleted file mode 100644 index 667bc37f37b383..00000000000000 --- a/changelog/20752.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**Vault PKI ACME Server**: Support for the ACME certificate lifecycle management protocol has been added to the Vault PKI Plugin. This allows standard ACME clients, such as the EFF's certbot and the CNCF's k8s cert-manager, to request certificates from a Vault server with no knowledge of Vault APIs or authentication mechanisms. 
For public-facing Vault instances, we recommend requiring External Account Bindings (EAB) to limit the ability to request certificates to only authenticated clients. -``` diff --git a/changelog/20758.txt b/changelog/20758.txt deleted file mode 100644 index 7eed0b075191a0..00000000000000 --- a/changelog/20758.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -auth/alicloud: Updated plugin from v0.14.0 to v0.15.0 -``` \ No newline at end of file diff --git a/changelog/20763.txt b/changelog/20763.txt deleted file mode 100644 index 311dcb0a62f428..00000000000000 --- a/changelog/20763.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -database/redis: Upgrade plugin dependencies -``` diff --git a/changelog/20764.txt b/changelog/20764.txt deleted file mode 100644 index adc14e07f152fb..00000000000000 --- a/changelog/20764.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -database/couchbase: Updated plugin from v0.9.0 to v0.9.2 -``` diff --git a/changelog/20767.txt b/changelog/20767.txt deleted file mode 100644 index b6d853a63903fb..00000000000000 --- a/changelog/20767.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -database/elasticsearch: Upgrade plugin dependencies -``` diff --git a/changelog/20771.txt b/changelog/20771.txt deleted file mode 100644 index 5cc1ee2d472c8e..00000000000000 --- a/changelog/20771.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -auth/kerberos: Enable plugin multiplexing -auth/kerberos: Upgrade plugin dependencies -``` diff --git a/changelog/20777.txt b/changelog/20777.txt deleted file mode 100644 index ec3c9e42b58ba5..00000000000000 --- a/changelog/20777.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/aure: Updated plugin from v0.15.0 to v0.16.0 -``` \ No newline at end of file diff --git a/changelog/20784.txt b/changelog/20784.txt deleted file mode 100644 index b24a857a20020a..00000000000000 --- a/changelog/20784.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -secrets/gcpkms: Enable plugin multiplexing -secrets/gcpkms: Upgrade plugin dependencies -``` diff --git a/changelog/20787.txt b/changelog/20787.txt deleted file mode 100644 index a69b90d7de82b4..00000000000000 --- a/changelog/20787.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/alicloud: Updated plugin from v0.5.4-beta1.0.20230330124709-3fcfc5914a22 to v0.15.0 -``` \ No newline at end of file diff --git a/changelog/20799.txt b/changelog/20799.txt deleted file mode 100644 index 2e17ff921d7b5e..00000000000000 --- a/changelog/20799.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -auth/jwt: Updated plugin from v0.15.0 to v0.16.0 -``` diff --git a/changelog/20802.txt b/changelog/20802.txt deleted file mode 100644 index de8e1b90dc0680..00000000000000 --- a/changelog/20802.txt +++ /dev/null @@ -1,6 +0,0 @@ -```release-note:change -secrets/kubernetes: Update plugin to v0.5.0 -``` -```release-note:change -auth/kubernetes: Update plugin to v0.16.0 -``` diff --git a/changelog/20807.txt b/changelog/20807.txt deleted file mode 100644 index 3a3c1f4cdad315..00000000000000 --- a/changelog/20807.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/database/snowflake: Updated plugin from v0.7.0 to v0.8.0 -``` \ No newline at end of file diff --git a/changelog/20816.txt b/changelog/20816.txt deleted file mode 100644 index aae4b59c48dc16..00000000000000 --- a/changelog/20816.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -auth/azure: Updated plugin from v0.13.0 to v0.15.0 -``` diff 
--git a/changelog/20818.txt b/changelog/20818.txt deleted file mode 100644 index 885ee92ce8aa4e..00000000000000 --- a/changelog/20818.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -secrets/gcp: Updated plugin from v0.15.0 to v0.16.0 -``` diff --git a/changelog/20834.txt b/changelog/20834.txt deleted file mode 100644 index f17f1d326b584b..00000000000000 --- a/changelog/20834.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:change -core: Remove feature toggle for SSCTs, i.e. the env var VAULT_DISABLE_SERVER_SIDE_CONSISTENT_TOKENS. -``` \ No newline at end of file diff --git a/changelog/20882.txt b/changelog/20882.txt deleted file mode 100644 index 3694468641da61..00000000000000 --- a/changelog/20882.txt +++ /dev/null @@ -1,6 +0,0 @@ -```release-note:change -secrets/database/mongodbatlas: Updated plugin from v0.9.0 to v0.10.0 -``` -```release-note:feature -**MongoDB Atlas Database Secrets**: Adds support for generating X.509 certificates on dynamic roles for user authentication -``` \ No newline at end of file diff --git a/changelog/20891.txt b/changelog/20891.txt deleted file mode 100644 index 3057ec56f40dcb..00000000000000 --- a/changelog/20891.txt +++ /dev/null @@ -1,4 +0,0 @@ -```release-note:improvement -secrets/consul: Improve error message when ACL bootstrapping fails. -``` - diff --git a/changelog/20933.txt b/changelog/20933.txt deleted file mode 100644 index 580475e2b5d52e..00000000000000 --- a/changelog/20933.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core: remove unnecessary *BarrierView field from backendEntry struct -``` \ No newline at end of file diff --git a/changelog/20934.txt b/changelog/20934.txt deleted file mode 100644 index 72c22574d615ae..00000000000000 --- a/changelog/20934.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:bug -agent: Fix bug with 'cache' stanza validation -``` diff --git a/changelog/20943.txt b/changelog/20943.txt deleted file mode 100644 index 7cf186d18420df..00000000000000 --- a/changelog/20943.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/pki: Support TLS-ALPN-01 challenge type in ACME for DNS certificate identifiers. -``` diff --git a/changelog/20981.txt b/changelog/20981.txt deleted file mode 100644 index 26a5304c5d3dab..00000000000000 --- a/changelog/20981.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -secrets/pki: Limit ACME issued certificates NotAfter TTL to a maximum of 90 days -``` diff --git a/changelog/20995.txt b/changelog/20995.txt deleted file mode 100644 index 76653d4d543349..00000000000000 --- a/changelog/20995.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -api: adding a new api sys method for replication status -``` diff --git a/changelog/21010.txt b/changelog/21010.txt deleted file mode 100644 index bcd218794df9c3..00000000000000 --- a/changelog/21010.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:improvement -core: Add a new periodic metric to track the number of available policies, `vault.policy.configured.count`. -``` \ No newline at end of file diff --git a/changelog/_go-ver-1130.txt b/changelog/_go-ver-1130.txt index c63e249c45883a..d1b6a148938393 100644 --- a/changelog/_go-ver-1130.txt +++ b/changelog/_go-ver-1130.txt @@ -1,3 +1,3 @@ ```release-note:change -core: Bump Go version to 1.20. +core: Bump Go version to 1.20.1. 
``` diff --git a/changelog/_go-ver-1132.txt b/changelog/_go-ver-1132.txt new file mode 100644 index 00000000000000..48c63ff472ba41 --- /dev/null +++ b/changelog/_go-ver-1132.txt @@ -0,0 +1,3 @@ +```release-note:change +core: Bump Go version to 1.20.3. +``` diff --git a/changelog/_go-ver-1140.txt b/changelog/_go-ver-1133.txt similarity index 100% rename from changelog/_go-ver-1140.txt rename to changelog/_go-ver-1133.txt diff --git a/changelog/pki-ui-improvements.txt b/changelog/pki-ui-improvements.txt deleted file mode 100644 index d824033f2e3c4a..00000000000000 --- a/changelog/pki-ui-improvements.txt +++ /dev/null @@ -1,3 +0,0 @@ -```release-note:feature -**NEW PKI Workflow in UI**: Completes generally available rollout of new PKI UI that provides smoother mount configuration and a more guided user experience -``` \ No newline at end of file diff --git a/command/agent.go b/command/agent.go index 11942ce5337754..6bc896de5109fa 100644 --- a/command/agent.go +++ b/command/agent.go @@ -1,49 +1,54 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( "context" "crypto/tls" - "errors" "flag" "fmt" "io" + "io/ioutil" "net" "net/http" "os" + "path/filepath" "sort" "strings" "sync" "time" - systemd "github.com/coreos/go-systemd/daemon" + token_file "github.com/hashicorp/vault/command/agent/auth/token-file" + ctconfig "github.com/hashicorp/consul-template/config" - log "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-multierror" + + "github.com/hashicorp/vault/command/agent/sink/inmem" + + systemd "github.com/coreos/go-systemd/daemon" + log "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-secure-stdlib/gatedwriter" "github.com/hashicorp/go-secure-stdlib/parseutil" "github.com/hashicorp/go-secure-stdlib/reloadutil" - "github.com/kr/pretty" - "github.com/mitchellh/cli" - "github.com/oklog/run" - "github.com/posener/complete" - "golang.org/x/text/cases" - "golang.org/x/text/language" - "google.golang.org/grpc/test/bufconn" - "github.com/hashicorp/vault/api" + "github.com/hashicorp/vault/command/agent/auth" + "github.com/hashicorp/vault/command/agent/auth/alicloud" + "github.com/hashicorp/vault/command/agent/auth/approle" + "github.com/hashicorp/vault/command/agent/auth/aws" + "github.com/hashicorp/vault/command/agent/auth/azure" + "github.com/hashicorp/vault/command/agent/auth/cert" + "github.com/hashicorp/vault/command/agent/auth/cf" + "github.com/hashicorp/vault/command/agent/auth/gcp" + "github.com/hashicorp/vault/command/agent/auth/jwt" + "github.com/hashicorp/vault/command/agent/auth/kerberos" + "github.com/hashicorp/vault/command/agent/auth/kubernetes" + "github.com/hashicorp/vault/command/agent/cache" + "github.com/hashicorp/vault/command/agent/cache/cacheboltdb" + "github.com/hashicorp/vault/command/agent/cache/cachememdb" + "github.com/hashicorp/vault/command/agent/cache/keymanager" agentConfig "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/command/agent/exec" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" "github.com/hashicorp/vault/command/agent/template" - "github.com/hashicorp/vault/command/agentproxyshared" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - cache "github.com/hashicorp/vault/command/agentproxyshared/cache" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" - "github.com/hashicorp/vault/command/agentproxyshared/sink/inmem" - 
"github.com/hashicorp/vault/command/agentproxyshared/winsvc" + "github.com/hashicorp/vault/command/agent/winsvc" "github.com/hashicorp/vault/helper/logging" "github.com/hashicorp/vault/helper/metricsutil" "github.com/hashicorp/vault/helper/useragent" @@ -52,6 +57,11 @@ import ( "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/logical" "github.com/hashicorp/vault/version" + "github.com/kr/pretty" + "github.com/mitchellh/cli" + "github.com/oklog/run" + "github.com/posener/complete" + "google.golang.org/grpc/test/bufconn" ) var ( @@ -244,7 +254,7 @@ func (c *AgentCommand) Run(args []string) int { // Ignore any setting of Agent's address. This client is used by the Agent // to reach out to Vault. This should never loop back to agent. - c.flagAgentProxyAddress = "" + c.flagAgentAddress = "" client, err := c.Client() if err != nil { c.UI.Error(fmt.Sprintf( @@ -264,17 +274,7 @@ func (c *AgentCommand) Run(args []string) int { } } - if config.IsDefaultListerDefined() { - // Notably, we cannot know for sure if they are using the API proxy functionality unless - // we log on each API proxy call, which would be too noisy. - // A customer could have a listener defined but only be using e.g. the cache-clear API, - // even though the API proxy is something they have available. - c.UI.Warn("==> Note: Vault Agent will be deprecating API proxy functionality in a future " + - "release, and this functionality has moved to a new subcommand, vault proxy. If you rely on this " + - "functionality, plan to move to Vault Proxy instead.") - } - - // ctx and cancelFunc are passed to the AuthHandler, SinkServer, ExecServer and + // ctx and cancelFunc are passed to the AuthHandler, SinkServer, and // TemplateServer that periodically listen for ctx.Done() to fire and shut // down accordingly. ctx, cancelFunc := context.WithCancel(context.Background()) @@ -286,7 +286,7 @@ func (c *AgentCommand) Run(args []string) int { Ui: c.UI, ServiceName: "vault", DisplayName: "Vault", - UserAgent: useragent.AgentString(), + UserAgent: useragent.String(), ClusterName: config.ClusterName, }) if err != nil { @@ -349,9 +349,37 @@ func (c *AgentCommand) Run(args []string) int { MountPath: config.AutoAuth.Method.MountPath, Config: config.AutoAuth.Method.Config, } - method, err = agentproxyshared.GetAutoAuthMethodFromConfig(config.AutoAuth.Method.Type, authConfig, config.Vault.Address) + switch config.AutoAuth.Method.Type { + case "alicloud": + method, err = alicloud.NewAliCloudAuthMethod(authConfig) + case "aws": + method, err = aws.NewAWSAuthMethod(authConfig) + case "azure": + method, err = azure.NewAzureAuthMethod(authConfig) + case "cert": + method, err = cert.NewCertAuthMethod(authConfig) + case "cf": + method, err = cf.NewCFAuthMethod(authConfig) + case "gcp": + method, err = gcp.NewGCPAuthMethod(authConfig) + case "jwt": + method, err = jwt.NewJWTAuthMethod(authConfig) + case "kerberos": + method, err = kerberos.NewKerberosAuthMethod(authConfig) + case "kubernetes": + method, err = kubernetes.NewKubernetesAuthMethod(authConfig) + case "approle": + method, err = approle.NewApproleAuthMethod(authConfig) + case "token_file": + method, err = token_file.NewTokenFileAuthMethod(authConfig) + case "pcf": // Deprecated. 
+ method, err = cf.NewCFAuthMethod(authConfig) + default: + c.UI.Error(fmt.Sprintf("Unknown auth method %q", config.AutoAuth.Method.Type)) + return 1 + } if err != nil { - c.UI.Error(fmt.Sprintf("Error creating %s auth method: %v", config.AutoAuth.Method.Type, err)) + c.UI.Error(fmt.Errorf("Error creating %s auth method: %w", config.AutoAuth.Method.Type, err).Error()) return 1 } } @@ -471,12 +499,10 @@ func (c *AgentCommand) Run(args []string) int { // The API proxy to be used, if listeners are configured apiProxy, err := cache.NewAPIProxy(&cache.APIProxyConfig{ - Client: proxyClient, - Logger: apiProxyLogger, - EnforceConsistency: enforceConsistency, - WhenInconsistentAction: whenInconsistent, - UserAgentStringFunction: useragent.AgentProxyStringWithProxiedUserAgent, - UserAgentString: useragent.AgentProxyString(), + Client: proxyClient, + Logger: apiProxyLogger, + EnforceConsistency: enforceConsistency, + WhenInconsistentAction: whenInconsistent, }) if err != nil { c.UI.Error(fmt.Sprintf("Error creating API proxy: %v", err)) @@ -502,14 +528,147 @@ func (c *AgentCommand) Run(args []string) int { // Configure persistent storage and add to LeaseCache if config.Cache.Persist != nil { - deferFunc, oldToken, err := agentproxyshared.AddPersistentStorageToLeaseCache(ctx, leaseCache, config.Cache.Persist, cacheLogger) + if config.Cache.Persist.Path == "" { + c.UI.Error("must specify persistent cache path") + return 1 + } + + // Set AAD based on key protection type + var aad string + switch config.Cache.Persist.Type { + case "kubernetes": + aad, err = getServiceAccountJWT(config.Cache.Persist.ServiceAccountTokenFile) + if err != nil { + c.UI.Error(fmt.Sprintf("failed to read service account token from %s: %s", config.Cache.Persist.ServiceAccountTokenFile, err)) + return 1 + } + default: + c.UI.Error(fmt.Sprintf("persistent key protection type %q not supported", config.Cache.Persist.Type)) + return 1 + } + + // Check if bolt file exists already + dbFileExists, err := cacheboltdb.DBFileExists(config.Cache.Persist.Path) if err != nil { - c.UI.Error(fmt.Sprintf("Error creating persistent cache: %v", err)) + c.UI.Error(fmt.Sprintf("failed to check if bolt file exists at path %s: %s", config.Cache.Persist.Path, err)) return 1 } - previousToken = oldToken - if deferFunc != nil { - defer deferFunc() + if dbFileExists { + // Open the bolt file, but wait to setup Encryption + ps, err := cacheboltdb.NewBoltStorage(&cacheboltdb.BoltStorageConfig{ + Path: config.Cache.Persist.Path, + Logger: cacheLogger.Named("cacheboltdb"), + }) + if err != nil { + c.UI.Error(fmt.Sprintf("Error opening persistent cache: %v", err)) + return 1 + } + + // Get the token from bolt for retrieving the encryption key, + // then setup encryption so that restore is possible + token, err := ps.GetRetrievalToken() + if err != nil { + c.UI.Error(fmt.Sprintf("Error getting retrieval token from persistent cache: %v", err)) + } + + if err := ps.Close(); err != nil { + c.UI.Warn(fmt.Sprintf("Failed to close persistent cache file after getting retrieval token: %s", err)) + } + + km, err := keymanager.NewPassthroughKeyManager(ctx, token) + if err != nil { + c.UI.Error(fmt.Sprintf("failed to configure persistence encryption for cache: %s", err)) + return 1 + } + + // Open the bolt file with the wrapper provided + ps, err = cacheboltdb.NewBoltStorage(&cacheboltdb.BoltStorageConfig{ + Path: config.Cache.Persist.Path, + Logger: cacheLogger.Named("cacheboltdb"), + Wrapper: km.Wrapper(), + AAD: aad, + }) + if err != nil { + 
c.UI.Error(fmt.Sprintf("Error opening persistent cache with wrapper: %v", err)) + return 1 + } + + // Restore anything in the persistent cache to the memory cache + if err := leaseCache.Restore(ctx, ps); err != nil { + c.UI.Error(fmt.Sprintf("Error restoring in-memory cache from persisted file: %v", err)) + if config.Cache.Persist.ExitOnErr { + return 1 + } + } + cacheLogger.Info("loaded memcache from persistent storage") + + // Check for previous auto-auth token + oldTokenBytes, err := ps.GetAutoAuthToken(ctx) + if err != nil { + c.UI.Error(fmt.Sprintf("Error in fetching previous auto-auth token: %s", err)) + if config.Cache.Persist.ExitOnErr { + return 1 + } + } + if len(oldTokenBytes) > 0 { + oldToken, err := cachememdb.Deserialize(oldTokenBytes) + if err != nil { + c.UI.Error(fmt.Sprintf("Error in deserializing previous auto-auth token cache entry: %s", err)) + if config.Cache.Persist.ExitOnErr { + return 1 + } + } + previousToken = oldToken.Token + } + + // If keep_after_import true, set persistent storage layer in + // leaseCache, else remove db file + if config.Cache.Persist.KeepAfterImport { + defer ps.Close() + leaseCache.SetPersistentStorage(ps) + } else { + if err := ps.Close(); err != nil { + c.UI.Warn(fmt.Sprintf("failed to close persistent cache file: %s", err)) + } + dbFile := filepath.Join(config.Cache.Persist.Path, cacheboltdb.DatabaseFileName) + if err := os.Remove(dbFile); err != nil { + c.UI.Error(fmt.Sprintf("failed to remove persistent storage file %s: %s", dbFile, err)) + if config.Cache.Persist.ExitOnErr { + return 1 + } + } + } + } else { + km, err := keymanager.NewPassthroughKeyManager(ctx, nil) + if err != nil { + c.UI.Error(fmt.Sprintf("failed to configure persistence encryption for cache: %s", err)) + return 1 + } + ps, err := cacheboltdb.NewBoltStorage(&cacheboltdb.BoltStorageConfig{ + Path: config.Cache.Persist.Path, + Logger: cacheLogger.Named("cacheboltdb"), + Wrapper: km.Wrapper(), + AAD: aad, + }) + if err != nil { + c.UI.Error(fmt.Sprintf("Error creating persistent cache: %v", err)) + return 1 + } + cacheLogger.Info("configured persistent storage", "path", config.Cache.Persist.Path) + + // Stash the key material in bolt + token, err := km.RetrievalToken(ctx) + if err != nil { + c.UI.Error(fmt.Sprintf("Error getting persistent key: %s", err)) + return 1 + } + if err := ps.StoreRetrievalToken(token); err != nil { + c.UI.Error(fmt.Sprintf("Error setting key in persistent cache: %v", err)) + return 1 + } + + defer ps.Close() + leaseCache.SetPersistentStorage(ps) } } } @@ -517,7 +676,7 @@ func (c *AgentCommand) Run(args []string) int { var listeners []net.Listener // If there are templates, add an in-process listener - if len(config.Templates) > 0 || len(config.EnvTemplates) > 0 { + if len(config.Templates) > 0 { config.Listeners = append(config.Listeners, &configutil.Listener{Type: listenerutil.BufConnType}) } @@ -680,8 +839,7 @@ func (c *AgentCommand) Run(args []string) int { // Start auto-auth and sink servers if method != nil { - enableTemplateTokenCh := len(config.Templates) > 0 - enableEnvTemplateTokenCh := len(config.EnvTemplates) > 0 + enableTokenCh := len(config.Templates) > 0 // Auth Handler is going to set its own retry values, so we want to // work on a copy of the client to not affect other subsystems. 
@@ -706,12 +864,9 @@ func (c *AgentCommand) Run(args []string) int { MinBackoff: config.AutoAuth.Method.MinBackoff, MaxBackoff: config.AutoAuth.Method.MaxBackoff, EnableReauthOnNewCredentials: config.AutoAuth.EnableReauthOnNewCredentials, - EnableTemplateTokenCh: enableTemplateTokenCh, - EnableExecTokenCh: enableEnvTemplateTokenCh, + EnableTemplateTokenCh: enableTokenCh, Token: previousToken, ExitOnError: config.AutoAuth.Method.ExitOnError, - UserAgent: useragent.AgentAutoAuthString(), - MetricsSignifier: "agent", }) ss := sink.NewSinkServer(&sink.SinkServerConfig{ @@ -729,14 +884,6 @@ func (c *AgentCommand) Run(args []string) int { ExitAfterAuth: config.ExitAfterAuth, }) - es := exec.NewServer(&exec.ServerConfig{ - AgentConfig: c.config, - Namespace: templateNamespace, - Logger: c.logger.Named("exec.server"), - LogLevel: c.logger.GetLevel(), - LogWriter: c.logWriter, - }) - g.Add(func() error { return ah.Run(ctx, method) }, func(error) { @@ -791,29 +938,17 @@ func (c *AgentCommand) Run(args []string) int { ts.Stop() }) - g.Add(func() error { - return es.Run(ctx, ah.ExecTokenCh) - }, func(err error) { - // Let the lease cache know this is a shutdown; no need to evict - // everything - if leaseCache != nil { - leaseCache.SetShuttingDown(true) - } - cancelFunc() - }) - } // Server configuration output padding := 24 sort.Strings(infoKeys) - caser := cases.Title(language.English) c.UI.Output("==> Vault Agent configuration:\n") for _, k := range infoKeys { c.UI.Output(fmt.Sprintf( "%s%s: %s", strings.Repeat(" ", padding-len(k)), - caser.String(k), + strings.Title(k), info[k])) } c.UI.Output("") @@ -838,21 +973,11 @@ func (c *AgentCommand) Run(args []string) int { var exitCode int if err := g.Run(); err != nil { - var processExitError *exec.ProcessExitError - if errors.As(err, &processExitError) { - exitCode = processExitError.ExitCode - } else { - exitCode = 1 - } - - if exitCode != 0 { - c.logger.Error("runtime error encountered", "error", err, "exitCode", exitCode) - c.UI.Error("Error encountered during run, refer to logs for more details.") - } + c.logger.Error("runtime error encountered", "error", err) + c.UI.Error("Error encountered during run, refer to logs for more details.") + exitCode = 1 } - c.notifySystemd(systemd.SdNotifyStopping) - return exitCode } @@ -1031,6 +1156,19 @@ func (c *AgentCommand) removePidFile(pidPath string) error { return os.Remove(pidPath) } +// GetServiceAccountJWT reads the service account jwt from `tokenFile`. Default is +// the default service account file path in kubernetes. 
+func getServiceAccountJWT(tokenFile string) (string, error) { + if len(tokenFile) == 0 { + tokenFile = "/var/run/secrets/kubernetes.io/serviceaccount/token" + } + token, err := ioutil.ReadFile(tokenFile) + if err != nil { + return "", err + } + return strings.TrimSpace(string(token)), nil +} + func (c *AgentCommand) handleMetrics() http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet { @@ -1056,7 +1194,7 @@ func (c *AgentCommand) handleMetrics() http.Handler { w.Header().Set("Content-Type", resp.Data[logical.HTTPContentType].(string)) switch v := resp.Data[logical.HTTPRawBody].(type) { case string: - w.WriteHeader(status) + w.WriteHeader((status)) w.Write([]byte(v)) case []byte: w.WriteHeader(status) diff --git a/command/agent/alicloud_end_to_end_test.go b/command/agent/alicloud_end_to_end_test.go index 0f5cdfbe4ecbed..64610811292ac3 100644 --- a/command/agent/alicloud_end_to_end_test.go +++ b/command/agent/alicloud_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -19,10 +16,10 @@ import ( uuid "github.com/hashicorp/go-uuid" vaultalicloud "github.com/hashicorp/vault-plugin-auth-alicloud" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentalicloud "github.com/hashicorp/vault/command/agentproxyshared/auth/alicloud" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentalicloud "github.com/hashicorp/vault/command/agent/auth/alicloud" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" "github.com/hashicorp/vault/helper/testhelpers" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/logging" diff --git a/command/agent/approle_end_to_end_test.go b/command/agent/approle_end_to_end_test.go index 515a13ec52f8de..e3456b3b5c747b 100644 --- a/command/agent/approle_end_to_end_test.go +++ b/command/agent/approle_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -16,10 +13,10 @@ import ( log "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" credAppRole "github.com/hashicorp/vault/builtin/credential/approle" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentapprole "github.com/hashicorp/vault/command/agentproxyshared/auth/approle" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentapprole "github.com/hashicorp/vault/command/agent/auth/approle" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/logical" diff --git a/command/agentproxyshared/auth/alicloud/alicloud.go b/command/agent/auth/alicloud/alicloud.go similarity index 98% rename from command/agentproxyshared/auth/alicloud/alicloud.go rename to command/agent/auth/alicloud/alicloud.go index 724597682a1f38..6fc640c290e0ae 100644 --- a/command/agentproxyshared/auth/alicloud/alicloud.go +++ b/command/agent/auth/alicloud/alicloud.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package alicloud import ( @@ -17,7 +14,7 @@ import ( hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault-plugin-auth-alicloud/tools" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) /* diff --git a/command/agentproxyshared/auth/approle/approle.go b/command/agent/auth/approle/approle.go similarity index 98% rename from command/agentproxyshared/auth/approle/approle.go rename to command/agent/auth/approle/approle.go index 9f33980a9f5460..e58299ad7b2e84 100644 --- a/command/agentproxyshared/auth/approle/approle.go +++ b/command/agent/auth/approle/approle.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package approle import ( @@ -15,7 +12,7 @@ import ( hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-secure-stdlib/parseutil" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) type approleMethod struct { diff --git a/command/agentproxyshared/auth/auth.go b/command/agent/auth/auth.go similarity index 79% rename from command/agentproxyshared/auth/auth.go rename to command/agent/auth/auth.go index fdcf12f947e221..3be7951e0c432a 100644 --- a/command/agentproxyshared/auth/auth.go +++ b/command/agent/auth/auth.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package auth import ( @@ -13,7 +10,6 @@ import ( "github.com/armon/go-metrics" "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/api" "github.com/hashicorp/vault/sdk/helper/jsonutil" ) @@ -23,7 +19,7 @@ const ( defaultMaxBackoff = 5 * time.Minute ) -// AuthMethod is the interface that auto-auth methods implement for the agent/proxy +// AuthMethod is the interface that auto-auth methods implement for the agent // to use. type AuthMethod interface { // Authenticate returns a mount path, header, request body, and error. @@ -53,10 +49,7 @@ type AuthConfig struct { type AuthHandler struct { OutputCh chan string TemplateTokenCh chan string - ExecTokenCh chan string token string - userAgent string - metricsSignifier string logger hclog.Logger client *api.Client random *rand.Rand @@ -65,36 +58,27 @@ type AuthHandler struct { minBackoff time.Duration enableReauthOnNewCredentials bool enableTemplateTokenCh bool - enableExecTokenCh bool exitOnError bool } type AuthHandlerConfig struct { - Logger hclog.Logger - Client *api.Client - WrapTTL time.Duration - MaxBackoff time.Duration - MinBackoff time.Duration - Token string - // UserAgent is the HTTP UserAgent header auto-auth will use when - // communicating with Vault. 
- UserAgent string - // MetricsSignifier is the first argument we will give to - // metrics.IncrCounter, signifying what the name of the application is - MetricsSignifier string + Logger hclog.Logger + Client *api.Client + WrapTTL time.Duration + MaxBackoff time.Duration + MinBackoff time.Duration + Token string EnableReauthOnNewCredentials bool EnableTemplateTokenCh bool - EnableExecTokenCh bool ExitOnError bool } func NewAuthHandler(conf *AuthHandlerConfig) *AuthHandler { ah := &AuthHandler{ // This is buffered so that if we try to output after the sink server - // has been shut down, during agent/proxy shutdown, we won't block + // has been shut down, during agent shutdown, we won't block OutputCh: make(chan string, 1), TemplateTokenCh: make(chan string, 1), - ExecTokenCh: make(chan string, 1), token: conf.Token, logger: conf.Logger, client: conf.Client, @@ -104,16 +88,13 @@ func NewAuthHandler(conf *AuthHandlerConfig) *AuthHandler { maxBackoff: conf.MaxBackoff, enableReauthOnNewCredentials: conf.EnableReauthOnNewCredentials, enableTemplateTokenCh: conf.EnableTemplateTokenCh, - enableExecTokenCh: conf.EnableExecTokenCh, exitOnError: conf.ExitOnError, - userAgent: conf.UserAgent, - metricsSignifier: conf.MetricsSignifier, } return ah } -func backoff(ctx context.Context, backoff *autoAuthBackoff) bool { +func backoff(ctx context.Context, backoff *agentBackoff) bool { if backoff.exitOnErr { return false } @@ -138,7 +119,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { ah.minBackoff = defaultMinBackoff } - backoffCfg := newAutoAuthBackoff(ah.minBackoff, ah.maxBackoff, ah.exitOnError) + backoffCfg := newAgentBackoff(ah.minBackoff, ah.maxBackoff, ah.exitOnError) if backoffCfg.min >= backoffCfg.max { return errors.New("auth handler: min_backoff cannot be greater than max_backoff") @@ -149,7 +130,6 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { am.Shutdown() close(ah.OutputCh) close(ah.TemplateTokenCh) - close(ah.ExecTokenCh) ah.logger.Info("auth handler stopped") }() @@ -173,15 +153,6 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { credCh = make(chan struct{}) } - if ah.client != nil { - headers := ah.client.Headers() - if headers == nil { - headers = make(http.Header) - } - headers.Set("User-Agent", ah.userAgent) - ah.client.SetHeaders(headers) - } - var watcher *api.LifetimeWatcher first := true @@ -205,7 +176,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { clientToUse, err = am.(AuthMethodWithClient).AuthClient(ah.client) if err != nil { ah.logger.Error("error creating client for authentication call", "error", err, "backoff", backoff) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -232,7 +203,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { secret, err = clientToUse.Auth().Token().LookupSelfWithContext(ctx) if err != nil { ah.logger.Error("could not look up token", "err", err, "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -252,7 +223,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { path, header, data, err = am.Authenticate(ctx, ah.client) if err != nil { ah.logger.Error("error getting path or data from method", "error", err, "backoff", backoffCfg) 
- metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -265,7 +236,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { wrapClient, err := clientToUse.Clone() if err != nil { ah.logger.Error("error creating client for wrapped call", "error", err, "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -303,7 +274,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { // Check errors/sanity if err != nil { ah.logger.Error("error authenticating", "error", err, "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -318,7 +289,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { case ah.wrapTTL > 0: if secret.WrapInfo == nil { ah.logger.Error("authentication returned nil wrap info", "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -327,7 +298,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { } if secret.WrapInfo.Token == "" { ah.logger.Error("authentication returned empty wrapped client token", "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -337,7 +308,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { wrappedResp, err := jsonutil.EncodeJSON(secret.WrapInfo) if err != nil { ah.logger.Error("failed to encode wrapinfo", "error", err, "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -349,9 +320,6 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { if ah.enableTemplateTokenCh { ah.TemplateTokenCh <- string(wrappedResp) } - if ah.enableExecTokenCh { - ah.ExecTokenCh <- string(wrappedResp) - } am.CredSuccess() backoffCfg.reset() @@ -376,7 +344,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { // i.e. 
if the token is invalid, we will fail in the authentication step if secret == nil || secret.Data == nil { ah.logger.Error("token file validation failed, token may be invalid", "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -386,7 +354,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { token, ok := secret.Data["id"].(string) if !ok || token == "" { ah.logger.Error("token file validation returned empty client token", "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -407,9 +375,6 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { if ah.enableTemplateTokenCh { ah.TemplateTokenCh <- token } - if ah.enableExecTokenCh { - ah.ExecTokenCh <- token - } tokenType := secret.Data["type"].(string) if tokenType == "batch" { @@ -418,7 +383,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { } else { if secret == nil || secret.Auth == nil { ah.logger.Error("authentication returned nil auth info", "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -427,7 +392,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { } if secret.Auth.ClientToken == "" { ah.logger.Error("authentication returned empty client token", "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -441,9 +406,6 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { if ah.enableTemplateTokenCh { ah.TemplateTokenCh <- secret.Auth.ClientToken } - if ah.enableExecTokenCh { - ah.ExecTokenCh <- secret.Auth.ClientToken - } } am.CredSuccess() @@ -459,7 +421,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { }) if err != nil { ah.logger.Error("error creating lifetime watcher", "error", err, "backoff", backoffCfg) - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) if backoff(ctx, backoffCfg) { continue @@ -467,7 +429,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { return err } - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "success"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "success"}, 1) // We don't want to trigger the renewal process for tokens with // unlimited TTL, such as the root token. 
if leaseDuration == 0 && isTokenFileMethod { @@ -488,13 +450,13 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { case err := <-watcher.DoneCh(): ah.logger.Info("lifetime watcher done channel triggered") if err != nil { - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "failure"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "failure"}, 1) ah.logger.Error("error renewing token", "error", err) } break LifetimeWatcherLoop case <-watcher.RenewCh(): - metrics.IncrCounter([]string{ah.metricsSignifier, "auth", "success"}, 1) + metrics.IncrCounter([]string{"agent", "auth", "success"}, 1) ah.logger.Info("renewed auth token") case <-credCh: @@ -505,15 +467,15 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error { } } -// autoAuthBackoff tracks exponential backoff state. -type autoAuthBackoff struct { +// agentBackoff tracks exponential backoff state. +type agentBackoff struct { min time.Duration max time.Duration current time.Duration exitOnErr bool } -func newAutoAuthBackoff(min, max time.Duration, exitErr bool) *autoAuthBackoff { +func newAgentBackoff(min, max time.Duration, exitErr bool) *agentBackoff { if max <= 0 { max = defaultMaxBackoff } @@ -522,7 +484,7 @@ func newAutoAuthBackoff(min, max time.Duration, exitErr bool) *autoAuthBackoff { min = defaultMinBackoff } - return &autoAuthBackoff{ + return &agentBackoff{ current: min, max: max, min: min, @@ -532,7 +494,7 @@ func newAutoAuthBackoff(min, max time.Duration, exitErr bool) *autoAuthBackoff { // next determines the next backoff duration that is roughly twice // the current value, capped to a max value, with a measure of randomness. -func (b *autoAuthBackoff) next() { +func (b *agentBackoff) next() { maxBackoff := 2 * b.current if maxBackoff > b.max { @@ -544,10 +506,10 @@ func (b *autoAuthBackoff) next() { b.current = maxBackoff - time.Duration(trim) } -func (b *autoAuthBackoff) reset() { +func (b *agentBackoff) reset() { b.current = b.min } -func (b autoAuthBackoff) String() string { +func (b agentBackoff) String() string { return b.current.Truncate(10 * time.Millisecond).String() } diff --git a/command/agentproxyshared/auth/auth_test.go b/command/agent/auth/auth_test.go similarity index 95% rename from command/agentproxyshared/auth/auth_test.go rename to command/agent/auth/auth_test.go index 5729435020fbaa..9501342749bb48 100644 --- a/command/agentproxyshared/auth/auth_test.go +++ b/command/agent/auth/auth_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package auth import ( @@ -112,7 +109,7 @@ consumption: func TestAgentBackoff(t *testing.T) { max := 1024 * time.Second - backoff := newAutoAuthBackoff(defaultMinBackoff, max, false) + backoff := newAgentBackoff(defaultMinBackoff, max, false) // Test initial value if backoff.current != defaultMinBackoff { @@ -162,7 +159,7 @@ func TestAgentMinBackoffCustom(t *testing.T) { for _, test := range tests { max := 1024 * time.Second - backoff := newAutoAuthBackoff(test.minBackoff, max, false) + backoff := newAgentBackoff(test.minBackoff, max, false) // Test initial value if backoff.current != test.want { diff --git a/command/agentproxyshared/auth/aws/aws.go b/command/agent/auth/aws/aws.go similarity index 98% rename from command/agentproxyshared/auth/aws/aws.go rename to command/agent/auth/aws/aws.go index 53d1623be8a4cb..b9dbdd5499c82a 100644 --- a/command/agentproxyshared/auth/aws/aws.go +++ b/command/agent/auth/aws/aws.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
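The agentBackoff helper restored in the hunk above doubles the current delay, caps it at the configured max, and trims a random amount before the next attempt. Below is a minimal standalone sketch of that policy; the 25% jitter window is an assumption (the trim computation is elided from this hunk), and the sketchBackoff/main names are illustrative rather than part of the patch.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// sketchBackoff mirrors the doubling-with-jitter behaviour described above.
type sketchBackoff struct {
	max     time.Duration
	current time.Duration
}

// next roughly doubles the delay, caps it at max, and subtracts a random
// trim (assumed here to be up to 25%) so concurrent clients do not retry
// in lockstep.
func (b *sketchBackoff) next() {
	doubled := 2 * b.current
	if doubled > b.max {
		doubled = b.max
	}
	trim := time.Duration(rand.Int63n(int64(doubled) / 4))
	b.current = doubled - trim
}

func main() {
	b := &sketchBackoff{max: 64 * time.Second, current: 1 * time.Second}
	for i := 0; i < 8; i++ {
		b.next()
		fmt.Println(b.current.Truncate(10 * time.Millisecond))
	}
}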
-// SPDX-License-Identifier: MPL-2.0 - package aws import ( @@ -20,7 +17,7 @@ import ( "github.com/hashicorp/go-secure-stdlib/awsutil" "github.com/hashicorp/go-uuid" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) const ( diff --git a/command/agentproxyshared/auth/azure/azure.go b/command/agent/auth/azure/azure.go similarity index 97% rename from command/agentproxyshared/auth/azure/azure.go rename to command/agent/auth/azure/azure.go index 77e6613971b37a..5554e72c2de176 100644 --- a/command/agentproxyshared/auth/azure/azure.go +++ b/command/agent/auth/azure/azure.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package azure import ( @@ -13,7 +10,7 @@ import ( cleanhttp "github.com/hashicorp/go-cleanhttp" hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "github.com/hashicorp/vault/helper/useragent" "github.com/hashicorp/vault/sdk/helper/jsonutil" ) diff --git a/command/agentproxyshared/auth/cert/cert.go b/command/agent/auth/cert/cert.go similarity index 89% rename from command/agentproxyshared/auth/cert/cert.go rename to command/agent/auth/cert/cert.go index 5270dcb1b4a278..2703aa8ecd5892 100644 --- a/command/agentproxyshared/auth/cert/cert.go +++ b/command/agent/auth/cert/cert.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -11,7 +8,7 @@ import ( "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "github.com/hashicorp/vault/sdk/helper/consts" ) @@ -23,7 +20,6 @@ type certMethod struct { caCert string clientCert string clientKey string - reload bool // Client is the cached client to use if cert info was provided. client *api.Client @@ -77,14 +73,6 @@ func NewCertAuthMethod(conf *auth.AuthConfig) (auth.AuthMethod, error) { return nil, errors.New("could not convert 'cert_key' config value to string") } } - - reload, ok := conf.Config["reload"] - if ok { - c.reload, ok = reload.(bool) - if !ok { - return nil, errors.New("could not convert 'reload' config value to bool") - } - } } return c, nil @@ -120,7 +108,7 @@ func (c *certMethod) AuthClient(client *api.Client) (*api.Client, error) { if c.caCert != "" || (c.clientKey != "" && c.clientCert != "") { // Return cached client if present - if c.client != nil && !c.reload { + if c.client != nil { return c.client, nil } diff --git a/command/agentproxyshared/auth/cert/cert_test.go b/command/agent/auth/cert/cert_test.go similarity index 67% rename from command/agentproxyshared/auth/cert/cert_test.go rename to command/agent/auth/cert/cert_test.go index 43a5f83f4c29d7..15ff8f4327f37e 100644 --- a/command/agentproxyshared/auth/cert/cert_test.go +++ b/command/agent/auth/cert/cert_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
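NewCertAuthMethod above pulls each optional field out of a map[string]interface{} config and returns a descriptive error when a type assertion fails. A small hedged sketch of that lookup pattern follows; stringField and the sample map are made-up names, and only the 'client_cert' key comes from the hunk.

package main

import (
	"errors"
	"fmt"
)

// stringField returns conf[key] as a string, an empty string when the key
// is absent, and an error when the value has the wrong type, mirroring the
// per-field checks in the auth method constructor.
func stringField(conf map[string]interface{}, key string) (string, error) {
	raw, ok := conf[key]
	if !ok {
		return "", nil
	}
	s, ok := raw.(string)
	if !ok {
		return "", errors.New("could not convert '" + key + "' config value to string")
	}
	return s, nil
}

func main() {
	conf := map[string]interface{}{"client_cert": "/etc/agent/client.pem"}
	cert, err := stringField(conf, "client_cert")
	fmt.Println(cert, err)
}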
-// SPDX-License-Identifier: MPL-2.0 - package cert import ( @@ -12,7 +9,7 @@ import ( "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) func TestCertAuthMethod_Authenticate(t *testing.T) { @@ -133,59 +130,3 @@ func TestCertAuthMethod_AuthClient_withCerts(t *testing.T) { t.Fatal("expected client from AuthClient to return back a cached client") } } - -func TestCertAuthMethod_AuthClient_withCertsReload(t *testing.T) { - clientCert, err := os.Open("./test-fixtures/keys/cert.pem") - if err != nil { - t.Fatal(err) - } - - defer clientCert.Close() - - clientKey, err := os.Open("./test-fixtures/keys/key.pem") - if err != nil { - t.Fatal(err) - } - - defer clientKey.Close() - - config := &auth.AuthConfig{ - Logger: hclog.NewNullLogger(), - MountPath: "cert-test", - Config: map[string]interface{}{ - "name": "with-certs-reloaded", - "client_cert": clientCert.Name(), - "client_key": clientKey.Name(), - "reload": true, - }, - } - - method, err := NewCertAuthMethod(config) - if err != nil { - t.Fatal(err) - } - - client, err := api.NewClient(nil) - if err != nil { - t.Fatal(err) - } - - clientToUse, err := method.(auth.AuthMethodWithClient).AuthClient(client) - if err != nil { - t.Fatal(err) - } - - if client == clientToUse { - t.Fatal("expected client from AuthClient to be different from original client") - } - - // Call AuthClient again to get back a new client with reloaded certificates - reloadedClient, err := method.(auth.AuthMethodWithClient).AuthClient(client) - if err != nil { - t.Fatal(err) - } - - if reloadedClient == clientToUse { - t.Fatal("expected client from AuthClient to return back a new client") - } -} diff --git a/command/agentproxyshared/auth/cert/test-fixtures/keys/cert.pem b/command/agent/auth/cert/test-fixtures/keys/cert.pem similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/keys/cert.pem rename to command/agent/auth/cert/test-fixtures/keys/cert.pem diff --git a/command/agentproxyshared/auth/cert/test-fixtures/keys/key.pem b/command/agent/auth/cert/test-fixtures/keys/key.pem similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/keys/key.pem rename to command/agent/auth/cert/test-fixtures/keys/key.pem diff --git a/command/agentproxyshared/auth/cert/test-fixtures/keys/pkioutput b/command/agent/auth/cert/test-fixtures/keys/pkioutput similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/keys/pkioutput rename to command/agent/auth/cert/test-fixtures/keys/pkioutput diff --git a/command/agentproxyshared/auth/cert/test-fixtures/root/pkioutput b/command/agent/auth/cert/test-fixtures/root/pkioutput similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/root/pkioutput rename to command/agent/auth/cert/test-fixtures/root/pkioutput diff --git a/command/agentproxyshared/auth/cert/test-fixtures/root/root.crl b/command/agent/auth/cert/test-fixtures/root/root.crl similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/root/root.crl rename to command/agent/auth/cert/test-fixtures/root/root.crl diff --git a/command/agentproxyshared/auth/cert/test-fixtures/root/rootcacert.pem b/command/agent/auth/cert/test-fixtures/root/rootcacert.pem similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/root/rootcacert.pem rename to command/agent/auth/cert/test-fixtures/root/rootcacert.pem diff --git 
a/command/agentproxyshared/auth/cert/test-fixtures/root/rootcakey.pem b/command/agent/auth/cert/test-fixtures/root/rootcakey.pem similarity index 100% rename from command/agentproxyshared/auth/cert/test-fixtures/root/rootcakey.pem rename to command/agent/auth/cert/test-fixtures/root/rootcakey.pem diff --git a/command/agentproxyshared/auth/cf/cf.go b/command/agent/auth/cf/cf.go similarity index 94% rename from command/agentproxyshared/auth/cf/cf.go rename to command/agent/auth/cf/cf.go index 3ee2077fb71327..9508b7164f2be9 100644 --- a/command/agentproxyshared/auth/cf/cf.go +++ b/command/agent/auth/cf/cf.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cf import ( @@ -15,7 +12,7 @@ import ( cf "github.com/hashicorp/vault-plugin-auth-cf" "github.com/hashicorp/vault-plugin-auth-cf/signatures" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) type cfMethod struct { diff --git a/command/agentproxyshared/auth/gcp/gcp.go b/command/agent/auth/gcp/gcp.go similarity index 97% rename from command/agentproxyshared/auth/gcp/gcp.go rename to command/agent/auth/gcp/gcp.go index bb7c6bab6d57f3..45d9b74f9497be 100644 --- a/command/agentproxyshared/auth/gcp/gcp.go +++ b/command/agent/auth/gcp/gcp.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package gcp import ( @@ -17,7 +14,7 @@ import ( hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-secure-stdlib/parseutil" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "golang.org/x/oauth2" "google.golang.org/api/iamcredentials/v1" ) diff --git a/command/agentproxyshared/auth/jwt/jwt.go b/command/agent/auth/jwt/jwt.go similarity index 61% rename from command/agentproxyshared/auth/jwt/jwt.go rename to command/agent/auth/jwt/jwt.go index fa878274344ff5..8f088eb199e5eb 100644 --- a/command/agentproxyshared/auth/jwt/jwt.go +++ b/command/agent/auth/jwt/jwt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package jwt import ( @@ -10,32 +7,30 @@ import ( "io/fs" "net/http" "os" - "path/filepath" "sync" "sync/atomic" "time" hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "github.com/hashicorp/vault/sdk/helper/parseutil" ) type jwtMethod struct { - logger hclog.Logger - path string - mountPath string - role string - removeJWTAfterReading bool - removeJWTFollowsSymlinks bool - credsFound chan struct{} - watchCh chan string - stopCh chan struct{} - doneCh chan struct{} - credSuccessGate chan struct{} - ticker *time.Ticker - once *sync.Once - latestToken *atomic.Value + logger hclog.Logger + path string + mountPath string + role string + removeJWTAfterReading bool + credsFound chan struct{} + watchCh chan string + stopCh chan struct{} + doneCh chan struct{} + credSuccessGate chan struct{} + ticker *time.Ticker + once *sync.Once + latestToken *atomic.Value } // NewJWTAuthMethod returns an implementation of Agent's auth.AuthMethod @@ -88,14 +83,6 @@ func NewJWTAuthMethod(conf *auth.AuthConfig) (auth.AuthMethod, error) { j.removeJWTAfterReading = removeJWTAfterReading } - if removeJWTFollowsSymlinksRaw, ok := conf.Config["remove_jwt_follows_symlinks"]; ok { - removeJWTFollowsSymlinks, err := parseutil.ParseBool(removeJWTFollowsSymlinksRaw) - if err != nil { - return nil, fmt.Errorf("error parsing 'remove_jwt_follows_symlinks' value: %w", err) - } - j.removeJWTFollowsSymlinks = removeJWTFollowsSymlinks - } - switch { case j.path == "": return nil, errors.New("'path' value is empty") @@ -103,24 +90,13 @@ func NewJWTAuthMethod(conf *auth.AuthConfig) (auth.AuthMethod, error) { return nil, errors.New("'role' value is empty") } - // Default readPeriod + // If we don't delete the JWT after reading, use a slower reload period, + // otherwise we would re-read the whole file every 500ms, instead of just + // doing a stat on the file every 500ms. readPeriod := 1 * time.Minute - - if jwtReadPeriodRaw, ok := conf.Config["jwt_read_period"]; ok { - jwtReadPeriod, err := parseutil.ParseDurationSecond(jwtReadPeriodRaw) - if err != nil { - return nil, fmt.Errorf("error parsing 'jwt_read_period' value: %w", err) - } - readPeriod = jwtReadPeriod - } else { - // If we don't delete the JWT after reading, use a slower reload period, - // otherwise we would re-read the whole file every 500ms, instead of just - // doing a stat on the file every 500ms. - if j.removeJWTAfterReading { - readPeriod = 500 * time.Millisecond - } + if j.removeJWTAfterReading { + readPeriod = 500 * time.Millisecond } - j.ticker = time.NewTicker(readPeriod) go j.runWatcher() @@ -171,8 +147,8 @@ func (j *jwtMethod) runWatcher() { case <-j.credSuccessGate: // We only start the next loop once we're initially successful, - // since at startup Authenticate will be called, and we don't want - // to end up immediately re-authenticating by having found a new + // since at startup Authenticate will be called and we don't want + // to end up immediately reauthenticating by having found a new // value } @@ -206,27 +182,11 @@ func (j *jwtMethod) ingressToken() { // Check that the path refers to a file. // If it's a symlink, it could still be a symlink to a directory, // but os.ReadFile below will return a descriptive error. 
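The read-period logic restored above polls quickly only when the JWT is deleted after each read, since the frequent check then usually amounts to a stat of a file that is not there yet. Here is a self-contained sketch of that watcher shape, assuming made-up names (watchToken, the /tmp path) and trimmed error handling; it is an illustration, not the method's actual code.

package main

import (
	"fmt"
	"os"
	"time"
)

// watchToken polls path on a ticker: 500ms when the token file is removed
// after reading (a cheap stat in the common case), one minute otherwise.
func watchToken(path string, removeAfterReading bool, stop <-chan struct{}) {
	period := 1 * time.Minute
	if removeAfterReading {
		period = 500 * time.Millisecond
	}
	ticker := time.NewTicker(period)
	defer ticker.Stop()
	for {
		select {
		case <-stop:
			return
		case <-ticker.C:
			token, err := os.ReadFile(path)
			if err != nil {
				continue // not present yet, or a transient read error
			}
			fmt.Printf("read %d bytes of JWT\n", len(token))
			if removeAfterReading {
				_ = os.Remove(path)
			}
		}
	}
}

func main() {
	stop := make(chan struct{})
	go watchToken("/tmp/agent-jwt", true, stop)
	time.Sleep(2 * time.Second)
	close(stop)
}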
- evalSymlinkPath := j.path switch mode := fi.Mode(); { case mode.IsRegular(): // regular file case mode&fs.ModeSymlink != 0: - // If our file path is a symlink, we should also return early (like above) without error - // if the file that is linked to is not present, otherwise we will error when trying - // to read that file by following the link in the os.ReadFile call. - evalSymlinkPath, err = filepath.EvalSymlinks(j.path) - if err != nil { - j.logger.Error("error encountered evaluating symlinks", "error", err) - return - } - _, err := os.Stat(evalSymlinkPath) - if err != nil { - if os.IsNotExist(err) { - return - } - j.logger.Error("error encountered stat'ing jwt file after evaluating symlinks", "error", err) - return - } + // symlink default: j.logger.Error("jwt file is not a regular file or symlink") return @@ -247,13 +207,7 @@ func (j *jwtMethod) ingressToken() { } if j.removeJWTAfterReading { - pathToRemove := j.path - if j.removeJWTFollowsSymlinks { - // If removeJWTFollowsSymlinks is set, we follow the symlink and delete the jwt, - // not just the symlink that links to the jwt - pathToRemove = evalSymlinkPath - } - if err := os.Remove(pathToRemove); err != nil { + if err := os.Remove(j.path); err != nil { j.logger.Error("error removing jwt file", "error", err) } } diff --git a/command/agentproxyshared/auth/jwt/jwt_test.go b/command/agent/auth/jwt/jwt_test.go similarity index 62% rename from command/agentproxyshared/auth/jwt/jwt_test.go rename to command/agent/auth/jwt/jwt_test.go index 3e0db409024785..8e9a2ae86c1368 100644 --- a/command/agentproxyshared/auth/jwt/jwt_test.go +++ b/command/agent/auth/jwt/jwt_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package jwt import ( @@ -12,7 +9,7 @@ import ( "testing" "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) func TestIngressToken(t *testing.T) { @@ -168,95 +165,3 @@ func TestDeleteAfterReading(t *testing.T) { } } } - -func TestDeleteAfterReadingSymlink(t *testing.T) { - for _, tc := range map[string]struct { - configValue string - shouldDelete bool - removeJWTFollowsSymlinks bool - }{ - "default": { - "", - true, - false, - }, - "explicit true": { - "true", - true, - false, - }, - "false": { - "false", - false, - false, - }, - "default + removeJWTFollowsSymlinks": { - "", - true, - true, - }, - "explicit true + removeJWTFollowsSymlinks": { - "true", - true, - true, - }, - "false + removeJWTFollowsSymlinks": { - "false", - false, - true, - }, - } { - rootDir, err := os.MkdirTemp("", "vault-agent-jwt-auth-test") - if err != nil { - t.Fatalf("failed to create temp dir: %s", err) - } - defer os.RemoveAll(rootDir) - tokenPath := path.Join(rootDir, "token") - err = os.WriteFile(tokenPath, []byte("test"), 0o644) - if err != nil { - t.Fatal(err) - } - - symlink, err := os.CreateTemp("", "auth.jwt.symlink.test.") - if err != nil { - t.Fatal(err) - } - symlinkName := symlink.Name() - symlink.Close() - os.Remove(symlinkName) - os.Symlink(tokenPath, symlinkName) - - config := &auth.AuthConfig{ - Config: map[string]interface{}{ - "path": symlinkName, - "role": "unusedrole", - }, - Logger: hclog.Default(), - } - if tc.configValue != "" { - config.Config["remove_jwt_after_reading"] = tc.configValue - } - config.Config["remove_jwt_follows_symlinks"] = tc.removeJWTFollowsSymlinks - - jwtAuth, err := NewJWTAuthMethod(config) - if err != nil { - t.Fatal(err) - } - - jwtAuth.(*jwtMethod).ingressToken() - - 
pathToCheck := symlinkName - if tc.removeJWTFollowsSymlinks { - pathToCheck = tokenPath - } - if _, err := os.Lstat(pathToCheck); tc.shouldDelete { - if err == nil || !os.IsNotExist(err) { - t.Fatal(err) - } - } else { - if err != nil { - t.Fatal(err) - } - } - } -} diff --git a/command/agentproxyshared/auth/kerberos/integtest/integrationtest.sh b/command/agent/auth/kerberos/integtest/integrationtest.sh similarity index 98% rename from command/agentproxyshared/auth/kerberos/integtest/integrationtest.sh rename to command/agent/auth/kerberos/integtest/integrationtest.sh index b3d9edf65db9e8..28da55f599cf68 100755 --- a/command/agentproxyshared/auth/kerberos/integtest/integrationtest.sh +++ b/command/agent/auth/kerberos/integtest/integrationtest.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Instructions # This integration test is for the Vault Kerberos agent. # Before running, execute: diff --git a/command/agentproxyshared/auth/kerberos/kerberos.go b/command/agent/auth/kerberos/kerberos.go similarity index 95% rename from command/agentproxyshared/auth/kerberos/kerberos.go rename to command/agent/auth/kerberos/kerberos.go index 67a3109f5f5ecb..894c177d5c8a98 100644 --- a/command/agentproxyshared/auth/kerberos/kerberos.go +++ b/command/agent/auth/kerberos/kerberos.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kerberos import ( @@ -13,7 +10,7 @@ import ( "github.com/hashicorp/go-secure-stdlib/parseutil" kerberos "github.com/hashicorp/vault-plugin-auth-kerberos" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "github.com/jcmturner/gokrb5/v8/spnego" ) diff --git a/command/agentproxyshared/auth/kerberos/kerberos_test.go b/command/agent/auth/kerberos/kerberos_test.go similarity index 94% rename from command/agentproxyshared/auth/kerberos/kerberos_test.go rename to command/agent/auth/kerberos/kerberos_test.go index 070893d7587672..4cfe3479ed4ce8 100644 --- a/command/agentproxyshared/auth/kerberos/kerberos_test.go +++ b/command/agent/auth/kerberos/kerberos_test.go @@ -1,13 +1,10 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kerberos import ( "testing" "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) func TestNewKerberosAuthMethod(t *testing.T) { diff --git a/command/agentproxyshared/auth/kubernetes/kubernetes.go b/command/agent/auth/kubernetes/kubernetes.go similarity index 95% rename from command/agentproxyshared/auth/kubernetes/kubernetes.go rename to command/agent/auth/kubernetes/kubernetes.go index acbb8c044cb584..c30f3cb5a68b13 100644 --- a/command/agentproxyshared/auth/kubernetes/kubernetes.go +++ b/command/agent/auth/kubernetes/kubernetes.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( @@ -15,7 +12,7 @@ import ( hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) const ( diff --git a/command/agentproxyshared/auth/kubernetes/kubernetes_test.go b/command/agent/auth/kubernetes/kubernetes_test.go similarity index 97% rename from command/agentproxyshared/auth/kubernetes/kubernetes_test.go rename to command/agent/auth/kubernetes/kubernetes_test.go index cbf617029c6aba..34f965c7709601 100644 --- a/command/agentproxyshared/auth/kubernetes/kubernetes_test.go +++ b/command/agent/auth/kubernetes/kubernetes_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( @@ -12,7 +9,7 @@ import ( "github.com/hashicorp/errwrap" hclog "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "github.com/hashicorp/vault/sdk/helper/logging" ) diff --git a/command/agentproxyshared/auth/token-file/token_file.go b/command/agent/auth/token-file/token_file.go similarity index 93% rename from command/agentproxyshared/auth/token-file/token_file.go rename to command/agent/auth/token-file/token_file.go index 4c7eaa22aba4ac..c5a8579371f423 100644 --- a/command/agentproxyshared/auth/token-file/token_file.go +++ b/command/agent/auth/token-file/token_file.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token_file import ( @@ -13,7 +10,7 @@ import ( "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" ) type tokenFileMethod struct { diff --git a/command/agentproxyshared/auth/token-file/token_file_test.go b/command/agent/auth/token-file/token_file_test.go similarity index 94% rename from command/agentproxyshared/auth/token-file/token_file_test.go rename to command/agent/auth/token-file/token_file_test.go index eb89fc02350e12..0dd73767144993 100644 --- a/command/agentproxyshared/auth/token-file/token_file_test.go +++ b/command/agent/auth/token-file/token_file_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token_file import ( @@ -9,7 +6,7 @@ import ( "testing" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/auth" + "github.com/hashicorp/vault/command/agent/auth" "github.com/hashicorp/vault/sdk/helper/logging" ) diff --git a/command/agent/auto_auth_preload_token_end_to_end_test.go b/command/agent/auto_auth_preload_token_end_to_end_test.go index 004e817dac1293..3f8d972a32cf75 100644 --- a/command/agent/auto_auth_preload_token_end_to_end_test.go +++ b/command/agent/auto_auth_preload_token_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -13,10 +10,10 @@ import ( hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" credAppRole "github.com/hashicorp/vault/builtin/credential/approle" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentAppRole "github.com/hashicorp/vault/command/agentproxyshared/auth/approle" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentAppRole "github.com/hashicorp/vault/command/agent/auth/approle" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/logical" diff --git a/command/agent/aws_end_to_end_test.go b/command/agent/aws_end_to_end_test.go index 08644bdc1d2e52..e8ed3a508b9fcc 100644 --- a/command/agent/aws_end_to_end_test.go +++ b/command/agent/aws_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -18,10 +15,10 @@ import ( uuid "github.com/hashicorp/go-uuid" "github.com/hashicorp/vault/api" vaultaws "github.com/hashicorp/vault/builtin/credential/aws" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentaws "github.com/hashicorp/vault/command/agentproxyshared/auth/aws" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentaws "github.com/hashicorp/vault/command/agent/auth/aws" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" "github.com/hashicorp/vault/helper/testhelpers" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/logging" diff --git a/command/agentproxyshared/cache/api_proxy.go b/command/agent/cache/api_proxy.go similarity index 74% rename from command/agentproxyshared/cache/api_proxy.go rename to command/agent/cache/api_proxy.go index e03bc1570b3c6c..1a754e064ec164 100644 --- a/command/agentproxyshared/cache/api_proxy.go +++ b/command/agent/cache/api_proxy.go @@ -1,12 +1,8 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cache import ( "context" "fmt" - gohttp "net/http" "sync" hclog "github.com/hashicorp/go-hclog" @@ -33,14 +29,12 @@ const ( // APIProxy is an implementation of the proxier interface that is used to // forward the request to Vault and get the response. type APIProxy struct { - client *api.Client - logger hclog.Logger - enforceConsistency EnforceConsistency - whenInconsistentAction WhenInconsistentAction - l sync.RWMutex - lastIndexStates []string - userAgentString string - userAgentStringFunction func(string) string + client *api.Client + logger hclog.Logger + enforceConsistency EnforceConsistency + whenInconsistentAction WhenInconsistentAction + l sync.RWMutex + lastIndexStates []string } var _ Proxier = &APIProxy{} @@ -50,12 +44,6 @@ type APIProxyConfig struct { Logger hclog.Logger EnforceConsistency EnforceConsistency WhenInconsistentAction WhenInconsistentAction - // UserAgentString is used as the User Agent when the proxied client - // does not have a user agent of its own. 
- UserAgentString string - // UserAgentStringFunction is the function to transform the proxied client's - // user agent into one that includes Vault-specific information. - UserAgentStringFunction func(string) string } func NewAPIProxy(config *APIProxyConfig) (Proxier, error) { @@ -63,12 +51,10 @@ func NewAPIProxy(config *APIProxyConfig) (Proxier, error) { return nil, fmt.Errorf("nil API client") } return &APIProxy{ - client: config.Client, - logger: config.Logger, - enforceConsistency: config.EnforceConsistency, - whenInconsistentAction: config.WhenInconsistentAction, - userAgentString: config.UserAgentString, - userAgentStringFunction: config.UserAgentStringFunction, + client: config.Client, + logger: config.Logger, + enforceConsistency: config.EnforceConsistency, + whenInconsistentAction: config.WhenInconsistentAction, }, nil } @@ -87,20 +73,6 @@ func (ap *APIProxy) Send(ctx context.Context, req *SendRequest) (*SendResponse, // the client doesn't manually set the header. Removing any Accept-Encoding header allows the // transparent compression to occur. req.Request.Header.Del("Accept-Encoding") - - if req.Request.Header == nil { - req.Request.Header = make(gohttp.Header) - } - - // Set our User-Agent to be one indicating we are Vault Agent's API proxy. - // If the sending client had one, preserve it. - if req.Request.Header.Get("User-Agent") != "" { - initialUserAgent := req.Request.Header.Get("User-Agent") - req.Request.Header.Set("User-Agent", ap.userAgentStringFunction(initialUserAgent)) - } else { - req.Request.Header.Set("User-Agent", ap.userAgentString) - } - client.SetHeaders(req.Request.Header) fwReq := client.NewRequest(req.Request.Method, req.Request.URL.Path) diff --git a/command/agentproxyshared/cache/api_proxy_test.go b/command/agent/cache/api_proxy_test.go similarity index 89% rename from command/agentproxyshared/cache/api_proxy_test.go rename to command/agent/cache/api_proxy_test.go index 6671b17fdaa837..aec0b72d1fc959 100644 --- a/command/agentproxyshared/cache/api_proxy_test.go +++ b/command/agent/cache/api_proxy_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
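APIProxy.Send above deletes any Accept-Encoding header so Go's transport can negotiate compression itself. The standalone illustration below shows why: the transport only auto-negotiates and auto-decompresses gzip when the caller has not set Accept-Encoding. The throwaway httptest server and values are made up for the example and are not part of the patch.

package main

import (
	"compress/gzip"
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Encoding", "gzip")
		gz := gzip.NewWriter(w)
		fmt.Fprint(gz, `{"ok":true}`)
		gz.Close()
	}))
	defer srv.Close()

	// No explicit Accept-Encoding: the transport asks for gzip and
	// transparently decompresses the body for us.
	resp, _ := http.Get(srv.URL)
	fmt.Println("auto:", resp.Uncompressed) // true
	resp.Body.Close()

	// Explicit Accept-Encoding: we get the raw gzip bytes back instead.
	req, _ := http.NewRequest(http.MethodGet, srv.URL, nil)
	req.Header.Set("Accept-Encoding", "gzip")
	resp2, _ := http.DefaultClient.Do(req)
	fmt.Println("manual:", resp2.Uncompressed, resp2.Header.Get("Content-Encoding")) // false gzip
	resp2.Body.Close()
}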
-// SPDX-License-Identifier: MPL-2.0 - package cache import ( @@ -12,8 +9,6 @@ import ( "testing" "time" - "github.com/hashicorp/vault/helper/useragent" - "github.com/hashicorp/vault/builtin/credential/userpass" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/logical" @@ -37,10 +32,8 @@ func TestAPIProxy(t *testing.T) { defer cleanup() proxier, err := NewAPIProxy(&APIProxyConfig{ - Client: client, - Logger: logging.NewVaultLogger(hclog.Trace), - UserAgentStringFunction: useragent.ProxyStringWithProxiedUserAgent, - UserAgentString: useragent.ProxyAPIProxyString(), + Client: client, + Logger: logging.NewVaultLogger(hclog.Trace), }) if err != nil { t.Fatal(err) @@ -75,10 +68,8 @@ func TestAPIProxyNoCache(t *testing.T) { defer cleanup() proxier, err := NewAPIProxy(&APIProxyConfig{ - Client: client, - Logger: logging.NewVaultLogger(hclog.Trace), - UserAgentStringFunction: useragent.ProxyStringWithProxiedUserAgent, - UserAgentString: useragent.ProxyAPIProxyString(), + Client: client, + Logger: logging.NewVaultLogger(hclog.Trace), }) if err != nil { t.Fatal(err) @@ -115,10 +106,8 @@ func TestAPIProxy_queryParams(t *testing.T) { defer cleanup() proxier, err := NewAPIProxy(&APIProxyConfig{ - Client: client, - Logger: logging.NewVaultLogger(hclog.Trace), - UserAgentStringFunction: useragent.ProxyStringWithProxiedUserAgent, - UserAgentString: useragent.ProxyAPIProxyString(), + Client: client, + Logger: logging.NewVaultLogger(hclog.Trace), }) if err != nil { t.Fatal(err) @@ -261,10 +250,8 @@ func setupClusterAndAgentCommon(ctx context.Context, t *testing.T, coreConfig *v // Create the API proxier apiProxy, err := NewAPIProxy(&APIProxyConfig{ - Client: clienToUse, - Logger: apiProxyLogger, - UserAgentStringFunction: useragent.ProxyStringWithProxiedUserAgent, - UserAgentString: useragent.ProxyAPIProxyString(), + Client: clienToUse, + Logger: apiProxyLogger, }) if err != nil { t.Fatal(err) diff --git a/command/agentproxyshared/cache/cache_test.go b/command/agent/cache/cache_test.go similarity index 99% rename from command/agentproxyshared/cache/cache_test.go rename to command/agent/cache/cache_test.go index 4786950bd5a01d..de66f86cc78b29 100644 --- a/command/agentproxyshared/cache/cache_test.go +++ b/command/agent/cache/cache_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cache import ( @@ -19,8 +16,8 @@ import ( "github.com/hashicorp/go-hclog" kv "github.com/hashicorp/vault-plugin-secrets-kv" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cachememdb" - "github.com/hashicorp/vault/command/agentproxyshared/sink/mock" + "github.com/hashicorp/vault/command/agent/cache/cachememdb" + "github.com/hashicorp/vault/command/agent/sink/mock" "github.com/hashicorp/vault/helper/namespace" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/consts" diff --git a/command/agentproxyshared/cache/cacheboltdb/bolt.go b/command/agent/cache/cacheboltdb/bolt.go similarity index 99% rename from command/agentproxyshared/cache/cacheboltdb/bolt.go rename to command/agent/cache/cacheboltdb/bolt.go index 434b4116542a4c..72cb7f3b824642 100644 --- a/command/agentproxyshared/cache/cacheboltdb/bolt.go +++ b/command/agent/cache/cacheboltdb/bolt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package cacheboltdb import ( diff --git a/command/agentproxyshared/cache/cacheboltdb/bolt_test.go b/command/agent/cache/cacheboltdb/bolt_test.go similarity index 98% rename from command/agentproxyshared/cache/cacheboltdb/bolt_test.go rename to command/agent/cache/cacheboltdb/bolt_test.go index 95aacd27cef03b..d6f5a742ef34a2 100644 --- a/command/agentproxyshared/cache/cacheboltdb/bolt_test.go +++ b/command/agent/cache/cacheboltdb/bolt_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cacheboltdb import ( @@ -16,7 +13,7 @@ import ( "github.com/golang/protobuf/proto" "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/cache/keymanager" + "github.com/hashicorp/vault/command/agent/cache/keymanager" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" bolt "go.etcd.io/bbolt" diff --git a/command/agentproxyshared/cache/cachememdb/cache_memdb.go b/command/agent/cache/cachememdb/cache_memdb.go similarity index 98% rename from command/agentproxyshared/cache/cachememdb/cache_memdb.go rename to command/agent/cache/cachememdb/cache_memdb.go index 93aa2bf78faf8c..7fdad303bb5694 100644 --- a/command/agentproxyshared/cache/cachememdb/cache_memdb.go +++ b/command/agent/cache/cachememdb/cache_memdb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cachememdb import ( diff --git a/command/agentproxyshared/cache/cachememdb/cache_memdb_test.go b/command/agent/cache/cachememdb/cache_memdb_test.go similarity index 99% rename from command/agentproxyshared/cache/cachememdb/cache_memdb_test.go rename to command/agent/cache/cachememdb/cache_memdb_test.go index 87b8eee798b2fd..4162fed0daf7be 100644 --- a/command/agentproxyshared/cache/cachememdb/cache_memdb_test.go +++ b/command/agent/cache/cachememdb/cache_memdb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cachememdb import ( diff --git a/command/agentproxyshared/cache/cachememdb/index.go b/command/agent/cache/cachememdb/index.go similarity index 98% rename from command/agentproxyshared/cache/cachememdb/index.go rename to command/agent/cache/cachememdb/index.go index a7da2edc2514c2..546a528cb2e40d 100644 --- a/command/agentproxyshared/cache/cachememdb/index.go +++ b/command/agent/cache/cachememdb/index.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cachememdb import ( diff --git a/command/agentproxyshared/cache/cachememdb/index_test.go b/command/agent/cache/cachememdb/index_test.go similarity index 94% rename from command/agentproxyshared/cache/cachememdb/index_test.go rename to command/agent/cache/cachememdb/index_test.go index c59ec5cba33443..577e37d647cdca 100644 --- a/command/agentproxyshared/cache/cachememdb/index_test.go +++ b/command/agent/cache/cachememdb/index_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cachememdb import ( diff --git a/command/agentproxyshared/cache/handler.go b/command/agent/cache/handler.go similarity index 97% rename from command/agentproxyshared/cache/handler.go rename to command/agent/cache/handler.go index bfb4434dc22f15..e634174c61ae90 100644 --- a/command/agentproxyshared/cache/handler.go +++ b/command/agent/cache/handler.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package cache import ( @@ -18,7 +15,7 @@ import ( "github.com/armon/go-metrics" "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/sink" + "github.com/hashicorp/vault/command/agent/sink" "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/logical" ) diff --git a/command/agentproxyshared/cache/keymanager/manager.go b/command/agent/cache/keymanager/manager.go similarity index 88% rename from command/agentproxyshared/cache/keymanager/manager.go rename to command/agent/cache/keymanager/manager.go index 0cecc03a11f7d1..ff4d0f2c00fa80 100644 --- a/command/agentproxyshared/cache/keymanager/manager.go +++ b/command/agent/cache/keymanager/manager.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keymanager import ( diff --git a/command/agentproxyshared/cache/keymanager/passthrough.go b/command/agent/cache/keymanager/passthrough.go similarity index 96% rename from command/agentproxyshared/cache/keymanager/passthrough.go rename to command/agent/cache/keymanager/passthrough.go index cda6b6e5db3493..68a1fc221b6234 100644 --- a/command/agentproxyshared/cache/keymanager/passthrough.go +++ b/command/agent/cache/keymanager/passthrough.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keymanager import ( diff --git a/command/agentproxyshared/cache/keymanager/passthrough_test.go b/command/agent/cache/keymanager/passthrough_test.go similarity index 93% rename from command/agentproxyshared/cache/keymanager/passthrough_test.go rename to command/agent/cache/keymanager/passthrough_test.go index 9327ee3f0ec632..084a71a143f262 100644 --- a/command/agentproxyshared/cache/keymanager/passthrough_test.go +++ b/command/agent/cache/keymanager/passthrough_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keymanager import ( diff --git a/command/agentproxyshared/cache/lease_cache.go b/command/agent/cache/lease_cache.go similarity index 97% rename from command/agentproxyshared/cache/lease_cache.go rename to command/agent/cache/lease_cache.go index 3bcb58002a6d67..87bfacd97ec117 100644 --- a/command/agentproxyshared/cache/lease_cache.go +++ b/command/agent/cache/lease_cache.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package cache import ( @@ -23,11 +20,10 @@ import ( "github.com/hashicorp/go-multierror" "github.com/hashicorp/go-secure-stdlib/base62" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cacheboltdb" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cachememdb" + "github.com/hashicorp/vault/command/agent/cache/cacheboltdb" + "github.com/hashicorp/vault/command/agent/cache/cachememdb" "github.com/hashicorp/vault/helper/namespace" nshelper "github.com/hashicorp/vault/helper/namespace" - "github.com/hashicorp/vault/helper/useragent" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/helper/cryptoutil" @@ -174,12 +170,6 @@ func (c *LeaseCache) SetPersistentStorage(storageIn *cacheboltdb.BoltStorage) { c.ps = storageIn } -// PersistentStorage is a getter for the persistent storage field in -// LeaseCache -func (c *LeaseCache) PersistentStorage() *cacheboltdb.BoltStorage { - return c.ps -} - // checkCacheForRequest checks the cache for a particular request based on its // computed ID. It returns a non-nil *SendResponse if an entry is found. func (c *LeaseCache) checkCacheForRequest(id string) (*SendResponse, error) { @@ -484,18 +474,7 @@ func (c *LeaseCache) startRenewing(ctx context.Context, index *cachememdb.Index, return } client.SetToken(req.Token) - - headers := client.Headers() - if headers == nil { - headers = make(http.Header) - } - - // We do not preserve the initial User-Agent here (i.e. use - // AgentProxyStringWithProxiedUserAgent) since these requests are from - // the proxy subsystem, but are made by Agent's lifetime watcher, - // not triggered by a specific request. - headers.Set("User-Agent", useragent.AgentProxyString()) - client.SetHeaders(headers) + client.SetHeaders(req.Request.Header) watcher, err := client.NewLifetimeWatcher(&api.LifetimeWatcherInput{ Secret: secret, diff --git a/command/agentproxyshared/cache/lease_cache_test.go b/command/agent/cache/lease_cache_test.go similarity index 97% rename from command/agentproxyshared/cache/lease_cache_test.go rename to command/agent/cache/lease_cache_test.go index 2de4c56b09bb57..1501fcfe56db0e 100644 --- a/command/agentproxyshared/cache/lease_cache_test.go +++ b/command/agent/cache/lease_cache_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
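Both the auth handler's LifetimeWatcherLoop and the lease cache's startRenewing shown above hand a secret to an api.LifetimeWatcher and then loop over its channels. A compact sketch of that consumption pattern as a standalone helper; the sketch package and keepRenewed name are illustrative, and error handling is trimmed.

package sketch

import (
	"log"

	"github.com/hashicorp/vault/api"
)

// keepRenewed runs the standard LifetimeWatcher loop: start the watcher,
// log successful renewals, and return once renewal stops (expiry,
// revocation, or a non-renewable secret).
func keepRenewed(client *api.Client, secret *api.Secret) error {
	watcher, err := client.NewLifetimeWatcher(&api.LifetimeWatcherInput{
		Secret: secret,
	})
	if err != nil {
		return err
	}
	go watcher.Start()
	defer watcher.Stop()

	for {
		select {
		case err := <-watcher.DoneCh():
			// The watcher is finished; a nil error means it simply stopped.
			return err
		case renewal := <-watcher.RenewCh():
			log.Printf("renewed secret at %s", renewal.RenewedAt)
		}
	}
}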
-// SPDX-License-Identifier: MPL-2.0 - package cache import ( @@ -21,10 +18,9 @@ import ( hclog "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-multierror" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cacheboltdb" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cachememdb" - "github.com/hashicorp/vault/command/agentproxyshared/cache/keymanager" - "github.com/hashicorp/vault/helper/useragent" + "github.com/hashicorp/vault/command/agent/cache/cacheboltdb" + "github.com/hashicorp/vault/command/agent/cache/cachememdb" + "github.com/hashicorp/vault/command/agent/cache/keymanager" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/helper/logging" @@ -43,7 +39,7 @@ func testNewLeaseCache(t *testing.T, responses []*SendResponse) *LeaseCache { lc, err := NewLeaseCache(&LeaseCacheConfig{ Client: client, BaseContext: context.Background(), - Proxier: NewMockProxier(responses), + Proxier: newMockProxier(responses), Logger: logging.NewVaultLogger(hclog.Trace).Named("cache.leasecache"), }) if err != nil { @@ -82,7 +78,7 @@ func testNewLeaseCacheWithPersistence(t *testing.T, responses []*SendResponse, s lc, err := NewLeaseCache(&LeaseCacheConfig{ Client: client, BaseContext: context.Background(), - Proxier: NewMockProxier(responses), + Proxier: newMockProxier(responses), Logger: logging.NewVaultLogger(hclog.Trace).Named("cache.leasecache"), Storage: storage, }) @@ -738,7 +734,7 @@ func compareBeforeAndAfter(t *testing.T, before, after *LeaseCache, beforeLen, a assert.Equal(t, cachedItem.Lease, restoredItem.Lease) assert.Equal(t, cachedItem.LeaseToken, restoredItem.LeaseToken) assert.Equal(t, cachedItem.Namespace, restoredItem.Namespace) - assert.EqualValues(t, cachedItem.RequestHeader, restoredItem.RequestHeader) + assert.Equal(t, cachedItem.RequestHeader, restoredItem.RequestHeader) assert.Equal(t, cachedItem.RequestMethod, restoredItem.RequestMethod) assert.Equal(t, cachedItem.RequestPath, restoredItem.RequestPath) assert.Equal(t, cachedItem.RequestToken, restoredItem.RequestToken) @@ -843,21 +839,16 @@ func TestLeaseCache_PersistAndRestore(t *testing.T) { var deleteIDs []string for i, ct := range cacheTests { // Send once to cache - req := httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)) - req.Header.Set("User-Agent", useragent.AgentProxyString()) - sendReq := &SendRequest{ Token: ct.token, - Request: req, + Request: httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)), } if ct.deleteFromPersistentStore { deleteID, err := computeIndexID(sendReq) require.NoError(t, err) deleteIDs = append(deleteIDs, deleteID) // Now reset the body after calculating the index - req = httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)) - req.Header.Set("User-Agent", useragent.AgentProxyString()) - sendReq.Request = req + sendReq.Request = httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)) } resp, err := lc.Send(context.Background(), sendReq) require.NoError(t, err) @@ -866,11 +857,9 @@ func TestLeaseCache_PersistAndRestore(t *testing.T) { // Send again to test cache. If this isn't cached, the response returned // will be the next in the list and the status code will not match. 
- req = httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)) - req.Header.Set("User-Agent", useragent.AgentProxyString()) sendCacheReq := &SendRequest{ Token: ct.token, - Request: req, + Request: httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)), } respCached, err := lc.Send(context.Background(), sendCacheReq) require.NoError(t, err, "failed to send request %+v", ct) @@ -902,11 +891,9 @@ func TestLeaseCache_PersistAndRestore(t *testing.T) { // And finally send the cache requests once to make sure they're all being // served from the restoredCache unless they were intended to be missing after restore. for i, ct := range cacheTests { - req := httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)) - req.Header.Set("User-Agent", useragent.AgentProxyString()) sendCacheReq := &SendRequest{ Token: ct.token, - Request: req, + Request: httptest.NewRequest(ct.method, ct.urlPath, strings.NewReader(ct.body)), } respCached, err := restoredCache.Send(context.Background(), sendCacheReq) require.NoError(t, err, "failed to send request %+v", ct) diff --git a/command/agentproxyshared/cache/listener.go b/command/agent/cache/listener.go similarity index 96% rename from command/agentproxyshared/cache/listener.go rename to command/agent/cache/listener.go index c8ed7221914807..ec1ddf2c965aaa 100644 --- a/command/agentproxyshared/cache/listener.go +++ b/command/agent/cache/listener.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cache import ( diff --git a/command/agentproxyshared/cache/proxy.go b/command/agent/cache/proxy.go similarity index 96% rename from command/agentproxyshared/cache/proxy.go rename to command/agent/cache/proxy.go index 4dcd1803389fb1..af9267ba01cc54 100644 --- a/command/agentproxyshared/cache/proxy.go +++ b/command/agent/cache/proxy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cache import ( diff --git a/command/agentproxyshared/cache/testing.go b/command/agent/cache/testing.go similarity index 89% rename from command/agentproxyshared/cache/testing.go rename to command/agent/cache/testing.go index 9fe9e6f1345c05..9ec637be4e0e50 100644 --- a/command/agentproxyshared/cache/testing.go +++ b/command/agent/cache/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cache import ( @@ -13,8 +10,6 @@ import ( "strings" "time" - "github.com/hashicorp/vault/helper/useragent" - "github.com/hashicorp/vault/api" ) @@ -27,7 +22,7 @@ type mockProxier struct { responseIndex int } -func NewMockProxier(responses []*SendResponse) *mockProxier { +func newMockProxier(responses []*SendResponse) *mockProxier { return &mockProxier{ proxiedResponses: responses, } @@ -49,13 +44,11 @@ func (p *mockProxier) ResponseIndex() int { } func newTestSendResponse(status int, body string) *SendResponse { - headers := make(http.Header) - headers.Add("User-Agent", useragent.AgentProxyString()) resp := &SendResponse{ Response: &api.Response{ Response: &http.Response{ StatusCode: status, - Header: headers, + Header: http.Header{}, }, }, } diff --git a/command/agent/cache_end_to_end_test.go b/command/agent/cache_end_to_end_test.go index a2a359abc9bd93..6337c918a6983d 100644 --- a/command/agent/cache_end_to_end_test.go +++ b/command/agent/cache_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -17,13 +14,12 @@ import ( log "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/api" credAppRole "github.com/hashicorp/vault/builtin/credential/approle" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentapprole "github.com/hashicorp/vault/command/agentproxyshared/auth/approle" - cache "github.com/hashicorp/vault/command/agentproxyshared/cache" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" - "github.com/hashicorp/vault/command/agentproxyshared/sink/inmem" - "github.com/hashicorp/vault/helper/useragent" + "github.com/hashicorp/vault/command/agent/auth" + agentapprole "github.com/hashicorp/vault/command/agent/auth/approle" + "github.com/hashicorp/vault/command/agent/cache" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" + "github.com/hashicorp/vault/command/agent/sink/inmem" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/helper/logging" @@ -167,10 +163,8 @@ func TestCache_UsingAutoAuthToken(t *testing.T) { // Create the API proxier apiProxy, err := cache.NewAPIProxy(&cache.APIProxyConfig{ - Client: client, - Logger: cacheLogger.Named("apiproxy"), - UserAgentStringFunction: useragent.ProxyStringWithProxiedUserAgent, - UserAgentString: useragent.ProxyAPIProxyString(), + Client: client, + Logger: cacheLogger.Named("apiproxy"), }) if err != nil { t.Fatal(err) diff --git a/command/agent/cert_end_to_end_test.go b/command/agent/cert_end_to_end_test.go index 12ea933ed60e65..bacb188021cda4 100644 --- a/command/agent/cert_end_to_end_test.go +++ b/command/agent/cert_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -12,14 +9,16 @@ import ( "testing" "time" + "github.com/hashicorp/vault/builtin/logical/pki" + hclog "github.com/hashicorp/go-hclog" + "github.com/hashicorp/vault/api" vaultcert "github.com/hashicorp/vault/builtin/credential/cert" - "github.com/hashicorp/vault/builtin/logical/pki" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentcert "github.com/hashicorp/vault/command/agentproxyshared/auth/cert" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentcert "github.com/hashicorp/vault/command/agent/auth/cert" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" "github.com/hashicorp/vault/helper/dhutil" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/jsonutil" diff --git a/command/agent/cf_end_to_end_test.go b/command/agent/cf_end_to_end_test.go index e143223af1d964..6bc1fa8b6a077e 100644 --- a/command/agent/cf_end_to_end_test.go +++ b/command/agent/cf_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -15,10 +12,10 @@ import ( "github.com/hashicorp/vault-plugin-auth-cf/testing/certificates" cfAPI "github.com/hashicorp/vault-plugin-auth-cf/testing/cf" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentcf "github.com/hashicorp/vault/command/agentproxyshared/auth/cf" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentcf "github.com/hashicorp/vault/command/agent/auth/cf" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/logical" diff --git a/command/agent/config/config.go b/command/agent/config/config.go index eea108b9fab3a9..230a2dddfab6ba 100644 --- a/command/agent/config/config.go +++ b/command/agent/config/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package config import ( @@ -12,22 +9,16 @@ import ( "os" "path/filepath" "strings" - "syscall" "time" ctconfig "github.com/hashicorp/consul-template/config" - ctsignals "github.com/hashicorp/consul-template/signals" "github.com/hashicorp/go-multierror" "github.com/hashicorp/go-secure-stdlib/parseutil" "github.com/hashicorp/hcl" "github.com/hashicorp/hcl/hcl/ast" - "github.com/mitchellh/mapstructure" - "k8s.io/utils/strings/slices" - - "github.com/hashicorp/vault/command/agentproxyshared" "github.com/hashicorp/vault/helper/namespace" "github.com/hashicorp/vault/internalshared/configutil" - "github.com/hashicorp/vault/sdk/helper/pointerutil" + "github.com/mitchellh/mapstructure" ) // Config is the configuration for Vault Agent. 
@@ -37,7 +28,7 @@ type Config struct { AutoAuth *AutoAuth `hcl:"auto_auth"` ExitAfterAuth bool `hcl:"exit_after_auth"` Cache *Cache `hcl:"cache"` - APIProxy *APIProxy `hcl:"api_proxy"` + APIProxy *APIProxy `hcl:"api_proxy"` Vault *Vault `hcl:"vault"` TemplateConfig *TemplateConfig `hcl:"template_config"` Templates []*ctconfig.TemplateConfig `hcl:"templates"` @@ -49,8 +40,6 @@ type Config struct { DisableKeepAlivesAPIProxy bool `hcl:"-"` DisableKeepAlivesTemplating bool `hcl:"-"` DisableKeepAlivesAutoAuth bool `hcl:"-"` - Exec *ExecConfig `hcl:"exec,optional"` - EnvTemplates []*ctconfig.TemplateConfig `hcl:"env_template,optional"` } const ( @@ -113,13 +102,22 @@ type APIProxy struct { // Cache contains any configuration needed for Cache mode type Cache struct { - UseAutoAuthTokenRaw interface{} `hcl:"use_auto_auth_token"` - UseAutoAuthToken bool `hcl:"-"` - ForceAutoAuthToken bool `hcl:"-"` - EnforceConsistency string `hcl:"enforce_consistency"` - WhenInconsistent string `hcl:"when_inconsistent"` - Persist *agentproxyshared.PersistConfig `hcl:"persist"` - InProcDialer transportDialer `hcl:"-"` + UseAutoAuthTokenRaw interface{} `hcl:"use_auto_auth_token"` + UseAutoAuthToken bool `hcl:"-"` + ForceAutoAuthToken bool `hcl:"-"` + EnforceConsistency string `hcl:"enforce_consistency"` + WhenInconsistent string `hcl:"when_inconsistent"` + Persist *Persist `hcl:"persist"` + InProcDialer transportDialer `hcl:"-"` +} + +// Persist contains configuration needed for persistent caching +type Persist struct { + Type string + Path string `hcl:"path"` + KeepAfterImport bool `hcl:"keep_after_import"` + ExitOnErr bool `hcl:"exit_on_err"` + ServiceAccountTokenFile string `hcl:"service_account_token_file"` } // AutoAuth is the configured authentication method and sinks @@ -167,12 +165,6 @@ type TemplateConfig struct { StaticSecretRenderInt time.Duration `hcl:"-"` } -type ExecConfig struct { - Command []string `hcl:"command,attr" mapstructure:"command"` - RestartOnSecretChanges string `hcl:"restart_on_secret_changes,optional" mapstructure:"restart_on_secret_changes"` - RestartStopSignal os.Signal `hcl:"-" mapstructure:"restart_stop_signal"` -} - func NewConfig() *Config { return &Config{ SharedConfig: new(configutil.SharedConfig), @@ -270,33 +262,9 @@ func (c *Config) Merge(c2 *Config) *Config { result.PidFile = c2.PidFile } - result.Exec = c.Exec - if c2.Exec != nil { - result.Exec = c2.Exec - } - - for _, envTmpl := range c.EnvTemplates { - result.EnvTemplates = append(result.EnvTemplates, envTmpl) - } - - for _, envTmpl := range c2.EnvTemplates { - result.EnvTemplates = append(result.EnvTemplates, envTmpl) - } - return result } -// IsDefaultListerDefined returns true if a default listener has been defined -// in this config -func (c *Config) IsDefaultListerDefined() bool { - for _, l := range c.Listeners { - if l.Role != "metrics_only" { - return true - } - } - return false -} - // ValidateConfig validates an Agent configuration after it has been fully merged together, to // ensure that required combinations of configs are there func (c *Config) ValidateConfig() error { @@ -311,7 +279,7 @@ func (c *Config) ValidateConfig() error { } if c.Cache != nil { - if len(c.Listeners) < 1 && len(c.Templates) < 1 && len(c.EnvTemplates) < 1 { + if len(c.Listeners) < 1 && len(c.Templates) < 1 { return fmt.Errorf("enabling the cache requires at least 1 template or 1 listener to be defined") } @@ -343,8 +311,7 @@ func (c *Config) ValidateConfig() error { if c.AutoAuth != nil { if len(c.AutoAuth.Sinks) == 0 && (c.APIProxy == 
nil || !c.APIProxy.UseAutoAuthToken) && - len(c.Templates) == 0 && - len(c.EnvTemplates) == 0 { + len(c.Templates) == 0 { return fmt.Errorf("auto_auth requires at least one sink or at least one template or api_proxy.use_auto_auth_token=true") } } @@ -353,126 +320,6 @@ func (c *Config) ValidateConfig() error { return fmt.Errorf("no auto_auth, cache, or listener block found in config") } - return c.validateEnvTemplateConfig() -} - -func (c *Config) validateEnvTemplateConfig() error { - // if we are not in env-template mode, exit early - if c.Exec == nil && len(c.EnvTemplates) == 0 { - return nil - } - - if c.Exec == nil { - return fmt.Errorf("a top-level 'exec' element must be specified with 'env_template' entries") - } - - if len(c.EnvTemplates) == 0 { - return fmt.Errorf("must specify at least one 'env_template' element with a top-level 'exec' element") - } - - if c.APIProxy != nil { - return fmt.Errorf("'api_proxy' cannot be specified with 'env_template' entries") - } - - if len(c.Templates) > 0 { - return fmt.Errorf("'template' cannot be specified with 'env_template' entries") - } - - if len(c.Exec.Command) == 0 { - return fmt.Errorf("'exec' requires a non-empty 'command' field") - } - - if !slices.Contains([]string{"always", "never"}, c.Exec.RestartOnSecretChanges) { - return fmt.Errorf("'exec.restart_on_secret_changes' unexpected value: %q", c.Exec.RestartOnSecretChanges) - } - - uniqueKeys := make(map[string]struct{}) - - for _, template := range c.EnvTemplates { - // Required: - // - the key (environment variable name) - // - either "contents" or "source" - // Optional / permitted: - // - error_on_missing_key - // - error_fatal - // - left_delimiter - // - right_delimiter - // - ExtFuncMap - // - function_denylist / function_blacklist - - if template.MapToEnvironmentVariable == nil { - return fmt.Errorf("env_template: an environment variable name is required") - } - - key := *template.MapToEnvironmentVariable - - if _, exists := uniqueKeys[key]; exists { - return fmt.Errorf("env_template: duplicate environment variable name: %q", key) - } - - uniqueKeys[key] = struct{}{} - - if template.Contents == nil && template.Source == nil { - return fmt.Errorf("env_template[%s]: either 'contents' or 'source' must be specified", key) - } - - if template.Contents != nil && template.Source != nil { - return fmt.Errorf("env_template[%s]: 'contents' and 'source' cannot be specified together", key) - } - - if template.Backup != nil { - return fmt.Errorf("env_template[%s]: 'backup' is not allowed", key) - } - - if template.Command != nil { - return fmt.Errorf("env_template[%s]: 'command' is not allowed", key) - } - - if template.CommandTimeout != nil { - return fmt.Errorf("env_template[%s]: 'command_timeout' is not allowed", key) - } - - if template.CreateDestDirs != nil { - return fmt.Errorf("env_template[%s]: 'create_dest_dirs' is not allowed", key) - } - - if template.Destination != nil { - return fmt.Errorf("env_template[%s]: 'destination' is not allowed", key) - } - - if template.Exec != nil { - return fmt.Errorf("env_template[%s]: 'exec' is not allowed", key) - } - - if template.Perms != nil { - return fmt.Errorf("env_template[%s]: 'perms' is not allowed", key) - } - - if template.User != nil { - return fmt.Errorf("env_template[%s]: 'user' is not allowed", key) - } - - if template.Uid != nil { - return fmt.Errorf("env_template[%s]: 'uid' is not allowed", key) - } - - if template.Group != nil { - return fmt.Errorf("env_template[%s]: 'group' is not allowed", key) - } - - if template.Gid != nil { - 
return fmt.Errorf("env_template[%s]: 'gid' is not allowed", key) - } - - if template.Wait != nil { - return fmt.Errorf("env_template[%s]: 'wait' is not allowed", key) - } - - if template.SandboxPath != nil { - return fmt.Errorf("env_template[%s]: 'sandbox_path' is not allowed", key) - } - } - return nil } @@ -638,15 +485,7 @@ func LoadConfigFile(path string) (*Config, error) { return nil, fmt.Errorf("error parsing 'template': %w", err) } - if err := parseExec(result, list); err != nil { - return nil, fmt.Errorf("error parsing 'exec': %w", err) - } - - if err := parseEnvTemplates(result, list); err != nil { - return nil, fmt.Errorf("error parsing 'env_template': %w", err) - } - - if result.Cache != nil && result.APIProxy == nil && (result.Cache.UseAutoAuthToken || result.Cache.ForceAutoAuthToken) { + if result.Cache != nil && result.APIProxy == nil { result.APIProxy = &APIProxy{ UseAutoAuthToken: result.Cache.UseAutoAuthToken, ForceAutoAuthToken: result.Cache.ForceAutoAuthToken, @@ -895,7 +734,7 @@ func parsePersist(result *Config, list *ast.ObjectList) error { item := persistList.Items[0] - var p agentproxyshared.PersistConfig + var p Persist err := hcl.DecodeObject(&p, item.Val) if err != nil { return err @@ -1193,121 +1032,3 @@ func parseTemplates(result *Config, list *ast.ObjectList) error { result.Templates = tcs return nil } - -func parseExec(result *Config, list *ast.ObjectList) error { - name := "exec" - - execList := list.Filter(name) - if len(execList.Items) == 0 { - return nil - } - - if len(execList.Items) > 1 { - return fmt.Errorf("at most one %q block is allowed", name) - } - - item := execList.Items[0] - var shadow interface{} - if err := hcl.DecodeObject(&shadow, item.Val); err != nil { - return fmt.Errorf("error decoding config: %s", err) - } - - parsed, ok := shadow.(map[string]interface{}) - if !ok { - return errors.New("error converting config") - } - - var execConfig ExecConfig - var md mapstructure.Metadata - decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - DecodeHook: mapstructure.ComposeDecodeHookFunc( - ctconfig.StringToFileModeFunc(), - ctconfig.StringToWaitDurationHookFunc(), - mapstructure.StringToSliceHookFunc(","), - mapstructure.StringToTimeDurationHookFunc(), - ctsignals.StringToSignalFunc(), - ), - ErrorUnused: true, - Metadata: &md, - Result: &execConfig, - }) - if err != nil { - return errors.New("mapstructure decoder creation failed") - } - if err := decoder.Decode(parsed); err != nil { - return err - } - - // if the user does not specify a restart signal, default to SIGTERM - if execConfig.RestartStopSignal == nil { - execConfig.RestartStopSignal = syscall.SIGTERM - } - - if execConfig.RestartOnSecretChanges == "" { - execConfig.RestartOnSecretChanges = "always" - } - - result.Exec = &execConfig - return nil -} - -func parseEnvTemplates(result *Config, list *ast.ObjectList) error { - name := "env_template" - - envTemplateList := list.Filter(name) - - if len(envTemplateList.Items) < 1 { - return nil - } - - envTemplates := make([]*ctconfig.TemplateConfig, 0, len(envTemplateList.Items)) - - for _, item := range envTemplateList.Items { - var shadow interface{} - if err := hcl.DecodeObject(&shadow, item.Val); err != nil { - return fmt.Errorf("error decoding config: %s", err) - } - - // Convert to a map and flatten the keys we want to flatten - parsed, ok := shadow.(map[string]any) - if !ok { - return errors.New("error converting config") - } - - var templateConfig ctconfig.TemplateConfig - var md mapstructure.Metadata - decoder, err := 
mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - DecodeHook: mapstructure.ComposeDecodeHookFunc( - ctconfig.StringToFileModeFunc(), - ctconfig.StringToWaitDurationHookFunc(), - mapstructure.StringToSliceHookFunc(","), - mapstructure.StringToTimeDurationHookFunc(), - ctsignals.StringToSignalFunc(), - ), - ErrorUnused: true, - Metadata: &md, - Result: &templateConfig, - }) - if err != nil { - return errors.New("mapstructure decoder creation failed") - } - if err := decoder.Decode(parsed); err != nil { - return err - } - - // parse the keys in the item for the environment variable name - if numberOfKeys := len(item.Keys); numberOfKeys != 1 { - return fmt.Errorf("expected one and only one environment variable name, got %d", numberOfKeys) - } - - // hcl parses this with extra quotes if quoted in config file - environmentVariableName := strings.Trim(item.Keys[0].Token.Text, `"`) - - templateConfig.MapToEnvironmentVariable = pointerutil.StringPtr(environmentVariableName) - - envTemplates = append(envTemplates, &templateConfig) - } - - result.EnvTemplates = envTemplates - return nil -} diff --git a/command/agent/config/config_test.go b/command/agent/config/config_test.go index 3be1ab33ab3e56..e8cdc2473421d1 100644 --- a/command/agent/config/config_test.go +++ b/command/agent/config/config_test.go @@ -1,19 +1,12 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package config import ( "os" - "syscall" "testing" "time" "github.com/go-test/deep" ctconfig "github.com/hashicorp/consul-template/config" - "golang.org/x/exp/slices" - - "github.com/hashicorp/vault/command/agentproxyshared" "github.com/hashicorp/vault/internalshared/configutil" "github.com/hashicorp/vault/sdk/helper/pointerutil" ) @@ -84,7 +77,7 @@ func TestLoadConfigFile_AgentCache(t *testing.T) { UseAutoAuthToken: true, UseAutoAuthTokenRaw: true, ForceAutoAuthToken: false, - Persist: &agentproxyshared.PersistConfig{ + Persist: &Persist{ Type: "kubernetes", Path: "/vault/agent-cache/", KeepAfterImport: true, @@ -189,7 +182,7 @@ func TestLoadConfigDir_AgentCache(t *testing.T) { UseAutoAuthToken: true, UseAutoAuthTokenRaw: true, ForceAutoAuthToken: false, - Persist: &agentproxyshared.PersistConfig{ + Persist: &Persist{ Type: "kubernetes", Path: "/vault/agent-cache/", KeepAfterImport: true, @@ -389,7 +382,7 @@ func TestLoadConfigFile_AgentCache_NoListeners(t *testing.T) { UseAutoAuthToken: true, UseAutoAuthTokenRaw: true, ForceAutoAuthToken: false, - Persist: &agentproxyshared.PersistConfig{ + Persist: &Persist{ Type: "kubernetes", Path: "/vault/agent-cache/", KeepAfterImport: true, @@ -617,7 +610,8 @@ func TestLoadConfigFile_AgentCache_NoAutoAuth(t *testing.T) { } expected := &Config{ - Cache: &Cache{}, + APIProxy: &APIProxy{}, + Cache: &Cache{}, SharedConfig: &configutil.SharedConfig{ PidFile: "./pidfile", Listeners: []*configutil.Listener{ @@ -934,6 +928,10 @@ func TestLoadConfigFile_AgentCache_AutoAuth_False(t *testing.T) { }, }, }, + APIProxy: &APIProxy{ + UseAutoAuthToken: false, + ForceAutoAuthToken: false, + }, Cache: &Cache{ UseAutoAuthToken: false, UseAutoAuthTokenRaw: "false", @@ -954,8 +952,9 @@ func TestLoadConfigFile_AgentCache_Persist(t *testing.T) { } expected := &Config{ + APIProxy: &APIProxy{}, Cache: &Cache{ - Persist: &agentproxyshared.PersistConfig{ + Persist: &Persist{ Type: "kubernetes", Path: "/vault/agent-cache/", KeepAfterImport: false, @@ -1246,43 +1245,6 @@ func TestLoadConfigFile_Template_NoSinks(t *testing.T) { } } -// TestLoadConfigFile_Template_WithCache tests ensures that cache 
{} stanza is -// permitted in vault agent configuration with template(s) -func TestLoadConfigFile_Template_WithCache(t *testing.T) { - config, err := LoadConfigFile("./test-fixtures/config-template-with-cache.hcl") - if err != nil { - t.Fatalf("err: %s", err) - } - - expected := &Config{ - SharedConfig: &configutil.SharedConfig{ - PidFile: "./pidfile", - }, - AutoAuth: &AutoAuth{ - Method: &Method{ - Type: "aws", - MountPath: "auth/aws", - Namespace: "my-namespace/", - Config: map[string]interface{}{ - "role": "foobar", - }, - }, - }, - Cache: &Cache{}, - Templates: []*ctconfig.TemplateConfig{ - { - Source: pointerutil.StringPtr("/path/on/disk/to/template.ctmpl"), - Destination: pointerutil.StringPtr("/path/on/disk/where/template/will/render.txt"), - }, - }, - } - - config.Prune() - if diff := deep.Equal(config, expected); diff != nil { - t.Fatal(diff) - } -} - func TestLoadConfigFile_Vault_Retry(t *testing.T) { config, err := LoadConfigFile("./test-fixtures/config-vault-retry.hcl") if err != nil { @@ -1390,6 +1352,7 @@ func TestLoadConfigFile_EnforceConsistency(t *testing.T) { }, PidFile: "", }, + APIProxy: &APIProxy{}, Cache: &Cache{ EnforceConsistency: "always", WhenInconsistent: "retry", @@ -2142,189 +2105,3 @@ func TestLoadConfigFile_Bad_Value_Disable_Keep_Alives(t *testing.T) { t.Fatal("should have error, it didn't") } } - -// TestLoadConfigFile_EnvTemplates_Simple loads and validates an env_template config -func TestLoadConfigFile_EnvTemplates_Simple(t *testing.T) { - cfg, err := LoadConfigFile("./test-fixtures/config-env-templates-simple.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := cfg.ValidateConfig(); err != nil { - t.Fatalf("validation error: %s", err) - } - - expectedKey := "MY_DATABASE_USER" - found := false - for _, envTemplate := range cfg.EnvTemplates { - if *envTemplate.MapToEnvironmentVariable == expectedKey { - found = true - } - } - if !found { - t.Fatalf("expected environment variable name to be populated") - } -} - -// TestLoadConfigFile_EnvTemplates_Complex loads and validates an env_template config -func TestLoadConfigFile_EnvTemplates_Complex(t *testing.T) { - cfg, err := LoadConfigFile("./test-fixtures/config-env-templates-complex.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := cfg.ValidateConfig(); err != nil { - t.Fatalf("validation error: %s", err) - } - - expectedKeys := []string{ - "FOO_PASSWORD", - "FOO_USER", - } - - envExists := func(key string) bool { - for _, envTmpl := range cfg.EnvTemplates { - if *envTmpl.MapToEnvironmentVariable == key { - return true - } - } - return false - } - - for _, expected := range expectedKeys { - if !envExists(expected) { - t.Fatalf("expected environment variable %s", expected) - } - } -} - -// TestLoadConfigFile_EnvTemplates_WithSource loads and validates an -// env_template config with "source" instead of "contents" -func TestLoadConfigFile_EnvTemplates_WithSource(t *testing.T) { - cfg, err := LoadConfigFile("./test-fixtures/config-env-templates-with-source.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := cfg.ValidateConfig(); err != nil { - t.Fatalf("validation error: %s", err) - } -} - -// TestLoadConfigFile_EnvTemplates_NoName ensures that env_template with no name triggers an error -func TestLoadConfigFile_EnvTemplates_NoName(t *testing.T) { - _, err := LoadConfigFile("./test-fixtures/bad-config-env-templates-no-name.hcl") - if err == nil { - t.Fatalf("expected error") - } -} - -// 
TestLoadConfigFile_EnvTemplates_ExecInvalidSignal ensures that an invalid signal triggers an error -func TestLoadConfigFile_EnvTemplates_ExecInvalidSignal(t *testing.T) { - _, err := LoadConfigFile("./test-fixtures/bad-config-env-templates-invalid-signal.hcl") - if err == nil { - t.Fatalf("expected error") - } -} - -// TestLoadConfigFile_EnvTemplates_ExecSimple validates the exec section with default parameters -func TestLoadConfigFile_EnvTemplates_ExecSimple(t *testing.T) { - cfg, err := LoadConfigFile("./test-fixtures/config-env-templates-simple.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := cfg.ValidateConfig(); err != nil { - t.Fatalf("validation error: %s", err) - } - - expectedCmd := []string{"/path/to/my/app", "arg1", "arg2"} - if !slices.Equal(cfg.Exec.Command, expectedCmd) { - t.Fatal("exec.command does not have expected value") - } - - // check defaults - if cfg.Exec.RestartOnSecretChanges != "always" { - t.Fatalf("expected cfg.Exec.RestartOnSecretChanges to be 'always', got '%s'", cfg.Exec.RestartOnSecretChanges) - } - - if cfg.Exec.RestartStopSignal != syscall.SIGTERM { - t.Fatalf("expected cfg.Exec.RestartStopSignal to be 'syscall.SIGTERM', got '%s'", cfg.Exec.RestartStopSignal) - } -} - -// TestLoadConfigFile_EnvTemplates_ExecComplex validates the exec section with non-default parameters -func TestLoadConfigFile_EnvTemplates_ExecComplex(t *testing.T) { - cfg, err := LoadConfigFile("./test-fixtures/config-env-templates-complex.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := cfg.ValidateConfig(); err != nil { - t.Fatalf("validation error: %s", err) - } - - if !slices.Equal(cfg.Exec.Command, []string{"env"}) { - t.Fatal("exec.command does not have expected value") - } - - if cfg.Exec.RestartOnSecretChanges != "never" { - t.Fatalf("expected cfg.Exec.RestartOnSecretChanges to be 'never', got %q", cfg.Exec.RestartOnSecretChanges) - } - - if cfg.Exec.RestartStopSignal != syscall.SIGINT { - t.Fatalf("expected cfg.Exec.RestartStopSignal to be 'syscall.SIGINT', got %q", cfg.Exec.RestartStopSignal) - } -} - -// TestLoadConfigFile_Bad_EnvTemplates_MissingExec ensures that ValidateConfig -// errors when "env_template" stanza(s) are specified but "exec" is missing -func TestLoadConfigFile_Bad_EnvTemplates_MissingExec(t *testing.T) { - config, err := LoadConfigFile("./test-fixtures/bad-config-env-templates-missing-exec.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := config.ValidateConfig(); err == nil { - t.Fatal("expected an error from ValidateConfig: exec section is missing") - } -} - -// TestLoadConfigFile_Bad_EnvTemplates_WithProxy ensures that ValidateConfig -// errors when both env_template and api_proxy stanzas are present -func TestLoadConfigFile_Bad_EnvTemplates_WithProxy(t *testing.T) { - config, err := LoadConfigFile("./test-fixtures/bad-config-env-templates-with-proxy.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := config.ValidateConfig(); err == nil { - t.Fatal("expected an error from ValidateConfig: listener / api_proxy are not compatible with env_template") - } -} - -// TestLoadConfigFile_Bad_EnvTemplates_WithFileTemplates ensures that -// ValidateConfig errors when both env_template and template stanzas are present -func TestLoadConfigFile_Bad_EnvTemplates_WithFileTemplates(t *testing.T) { - config, err := LoadConfigFile("./test-fixtures/bad-config-env-templates-with-file-templates.hcl") - if err != nil { 
- t.Fatalf("error loading config file: %s", err) - } - - if err := config.ValidateConfig(); err == nil { - t.Fatal("expected an error from ValidateConfig: file template stanza is not compatible with env_template") - } -} - -// TestLoadConfigFile_Bad_EnvTemplates_DisalowedFields ensure that -// ValidateConfig errors for disalowed env_template fields -func TestLoadConfigFile_Bad_EnvTemplates_DisalowedFields(t *testing.T) { - config, err := LoadConfigFile("./test-fixtures/bad-config-env-templates-disalowed-fields.hcl") - if err != nil { - t.Fatalf("error loading config file: %s", err) - } - - if err := config.ValidateConfig(); err == nil { - t.Fatal("expected an error from ValidateConfig: disallowed fields specified in env_template") - } -} diff --git a/command/agent/config/test-fixtures/bad-config-api_proxy-cache.hcl b/command/agent/config/test-fixtures/bad-config-api_proxy-cache.hcl index 7d2bf5c2d3a962..ae79293b483a5c 100644 --- a/command/agent/config/test-fixtures/bad-config-api_proxy-cache.hcl +++ b/command/agent/config/test-fixtures/bad-config-api_proxy-cache.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/bad-config-auto_auth-nosinks-nocache-notemplates.hcl b/command/agent/config/test-fixtures/bad-config-auto_auth-nosinks-nocache-notemplates.hcl index d3d5d426695ae6..93e31aad4d7370 100644 --- a/command/agent/config/test-fixtures/bad-config-auto_auth-nosinks-nocache-notemplates.hcl +++ b/command/agent/config/test-fixtures/bad-config-auto_auth-nosinks-nocache-notemplates.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/bad-config-auto_auth-wrapped-multiple-sinks.hcl b/command/agent/config/test-fixtures/bad-config-auto_auth-wrapped-multiple-sinks.hcl index 5c2b3fb79a7928..9a491fa4efc256 100644 --- a/command/agent/config/test-fixtures/bad-config-auto_auth-wrapped-multiple-sinks.hcl +++ b/command/agent/config/test-fixtures/bad-config-auto_auth-wrapped-multiple-sinks.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/bad-config-cache-auto_auth-method-wrapping.hcl b/command/agent/config/test-fixtures/bad-config-cache-auto_auth-method-wrapping.hcl index 8a39837fa721b8..5821c1b59f8395 100644 --- a/command/agent/config/test-fixtures/bad-config-cache-auto_auth-method-wrapping.hcl +++ b/command/agent/config/test-fixtures/bad-config-cache-auto_auth-method-wrapping.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/bad-config-cache-force-token-no-auth-method.hcl b/command/agent/config/test-fixtures/bad-config-cache-force-token-no-auth-method.hcl index d1cae751297073..e2c8b328eb02dd 100644 --- a/command/agent/config/test-fixtures/bad-config-cache-force-token-no-auth-method.hcl +++ b/command/agent/config/test-fixtures/bad-config-cache-force-token-no-auth-method.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" cache { diff --git a/command/agent/config/test-fixtures/bad-config-cache-inconsistent-auto_auth.hcl b/command/agent/config/test-fixtures/bad-config-cache-inconsistent-auto_auth.hcl index 38b9c2c12207b0..5029b8d376416d 100644 --- a/command/agent/config/test-fixtures/bad-config-cache-inconsistent-auto_auth.hcl +++ b/command/agent/config/test-fixtures/bad-config-cache-inconsistent-auto_auth.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" cache { diff --git a/command/agent/config/test-fixtures/bad-config-cache-no-listeners.hcl b/command/agent/config/test-fixtures/bad-config-cache-no-listeners.hcl index 9112183ea84270..9d8110e2d67c63 100644 --- a/command/agent/config/test-fixtures/bad-config-cache-no-listeners.hcl +++ b/command/agent/config/test-fixtures/bad-config-cache-no-listeners.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" cache { diff --git a/command/agent/config/test-fixtures/bad-config-disable-idle-connections.hcl b/command/agent/config/test-fixtures/bad-config-disable-idle-connections.hcl index 34c292e74f9bbb..c13c82520ee6b2 100644 --- a/command/agent/config/test-fixtures/bad-config-disable-idle-connections.hcl +++ b/command/agent/config/test-fixtures/bad-config-disable-idle-connections.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = ["foo","caching","templating"] diff --git a/command/agent/config/test-fixtures/bad-config-disable-keep-alives.hcl b/command/agent/config/test-fixtures/bad-config-disable-keep-alives.hcl index 087e2ffa010d26..3f1b9f0a198ef6 100644 --- a/command/agent/config/test-fixtures/bad-config-disable-keep-alives.hcl +++ b/command/agent/config/test-fixtures/bad-config-disable-keep-alives.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = ["foo","caching","templating"] diff --git a/command/agent/config/test-fixtures/bad-config-env-templates-disalowed-fields.hcl b/command/agent/config/test-fixtures/bad-config-env-templates-disalowed-fields.hcl deleted file mode 100644 index 22ad96c5cf9305..00000000000000 --- a/command/agent/config/test-fixtures/bad-config-env-templates-disalowed-fields.hcl +++ /dev/null @@ -1,33 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/Users/avean/.vault-token" - } - } -} - -template_config { - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault { - address = "http://localhost:8200" -} - -env_template "FOO_PASSWORD" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.password }}{{ end }}" - - # Error: destination and create_dest_dirs are not allowed in env_template - destination = "/path/on/disk/where/template/will/render.txt" - create_dest_dirs = true -} - -exec { - command = ["./my-app", "arg1", "arg2"] - restart_on_secret_changes = "always" - restart_stop_signal = "SIGTERM" -} diff --git a/command/agent/config/test-fixtures/bad-config-env-templates-invalid-signal.hcl b/command/agent/config/test-fixtures/bad-config-env-templates-invalid-signal.hcl deleted file mode 100644 index e8d822d64b0d48..00000000000000 --- a/command/agent/config/test-fixtures/bad-config-env-templates-invalid-signal.hcl +++ /dev/null @@ -1,26 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/home/username/.vault-token" - } - } -} - -vault { - address = "http://localhost:8200" -} - -env_template "FOO" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.lock }}{{ end }}" - error_on_missing_key = false -} - - -exec { - command = ["env"] - restart_on_secret_changes = "never" - restart_stop_signal = "notasignal" -} diff --git a/command/agent/config/test-fixtures/bad-config-env-templates-missing-exec.hcl b/command/agent/config/test-fixtures/bad-config-env-templates-missing-exec.hcl deleted file mode 100644 index 6283e56a9a467a..00000000000000 --- a/command/agent/config/test-fixtures/bad-config-env-templates-missing-exec.hcl +++ /dev/null @@ -1,30 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/Users/avean/.vault-token" - } - } -} - -template_config { - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault { - address = "http://localhost:8200" -} - -env_template "FOO_PASSWORD" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.password }}{{ end }}" - error_on_missing_key = false -} -env_template "FOO_USER" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.user }}{{ end }}" - error_on_missing_key = false -} - -# Error: missing a required "exec" section! 
diff --git a/command/agent/config/test-fixtures/bad-config-env-templates-no-name.hcl b/command/agent/config/test-fixtures/bad-config-env-templates-no-name.hcl deleted file mode 100644 index f77f20c11227ef..00000000000000 --- a/command/agent/config/test-fixtures/bad-config-env-templates-no-name.hcl +++ /dev/null @@ -1,26 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/home/username/.vault-token" - } - } -} - -vault { - address = "http://localhost:8200" -} - -env_template { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.lock }}{{ end }}" - error_on_missing_key = false -} - - -exec { - command = ["env"] - restart_on_secret_changes = "never" - restart_stop_signal = "SIGTERM" -} diff --git a/command/agent/config/test-fixtures/bad-config-env-templates-with-file-templates.hcl b/command/agent/config/test-fixtures/bad-config-env-templates-with-file-templates.hcl deleted file mode 100644 index 811b10d5208f28..00000000000000 --- a/command/agent/config/test-fixtures/bad-config-env-templates-with-file-templates.hcl +++ /dev/null @@ -1,40 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/Users/avean/.vault-token" - } - } -} - -template_config { - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault { - address = "http://localhost:8200" -} - -# Error: template is incompatible with env_template! -template { - source = "/path/on/disk/to/template.ctmpl" - destination = "/path/on/disk/where/template/will/render.txt" -} - -env_template "FOO_PASSWORD" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.password }}{{ end }}" - error_on_missing_key = false -} -env_template "FOO_USER" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.user }}{{ end }}" - error_on_missing_key = false -} - -exec { - command = ["./my-app", "arg1", "arg2"] - restart_on_secret_changes = "always" - restart_stop_signal = "SIGTERM" -} diff --git a/command/agent/config/test-fixtures/bad-config-env-templates-with-proxy.hcl b/command/agent/config/test-fixtures/bad-config-env-templates-with-proxy.hcl deleted file mode 100644 index 3c6095ddeae827..00000000000000 --- a/command/agent/config/test-fixtures/bad-config-env-templates-with-proxy.hcl +++ /dev/null @@ -1,47 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/Users/avean/.vault-token" - } - } -} - -template_config { - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault { - address = "http://localhost:8200" -} - -env_template "FOO_PASSWORD" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.password }}{{ end }}" - error_on_missing_key = false -} -env_template "FOO_USER" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.user }}{{ end }}" - error_on_missing_key = false -} - -exec { - command = ["./my-app", "arg1", "arg2"] - restart_on_secret_changes = "always" - restart_stop_signal = "SIGTERM" -} - -# Error: api_proxy is incompatible with env_template -api_proxy { - use_auto_auth_token = "force" - enforce_consistency = "always" - when_inconsistent = "forward" -} - -# Error: listener is incompatible with env_template -listener "tcp" { - address = "127.0.0.1:8300" - tls_disable = true -} diff --git a/command/agent/config/test-fixtures/bad-config-method-wrapping-and-sink-wrapping.hcl b/command/agent/config/test-fixtures/bad-config-method-wrapping-and-sink-wrapping.hcl index cb9696dfb3f169..7a375737161a86 100644 --- 
a/command/agent/config/test-fixtures/bad-config-method-wrapping-and-sink-wrapping.hcl +++ b/command/agent/config/test-fixtures/bad-config-method-wrapping-and-sink-wrapping.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-api_proxy-auto_auth-all-api_proxy-config.hcl b/command/agent/config/test-fixtures/config-api_proxy-auto_auth-all-api_proxy-config.hcl index a3e4e5b99a2a9f..b486418eedef86 100644 --- a/command/agent/config/test-fixtures/config-api_proxy-auto_auth-all-api_proxy-config.hcl +++ b/command/agent/config/test-fixtures/config-api_proxy-auto_auth-all-api_proxy-config.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-auto_auth-false.hcl b/command/agent/config/test-fixtures/config-cache-auto_auth-false.hcl index 252216e21d7fc2..1a2fd91d6c70e0 100644 --- a/command/agent/config/test-fixtures/config-cache-auto_auth-false.hcl +++ b/command/agent/config/test-fixtures/config-cache-auto_auth-false.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-auto_auth-force.hcl b/command/agent/config/test-fixtures/config-cache-auto_auth-force.hcl index 429645527a15a4..9aad89cdd2aa4a 100644 --- a/command/agent/config/test-fixtures/config-cache-auto_auth-force.hcl +++ b/command/agent/config/test-fixtures/config-cache-auto_auth-force.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-auto_auth-no-sink.hcl b/command/agent/config/test-fixtures/config-cache-auto_auth-no-sink.hcl index 80486b346b1c24..b3dc1383f67624 100644 --- a/command/agent/config/test-fixtures/config-cache-auto_auth-no-sink.hcl +++ b/command/agent/config/test-fixtures/config-cache-auto_auth-no-sink.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-auto_auth-true.hcl b/command/agent/config/test-fixtures/config-cache-auto_auth-true.hcl index cebcdfbc586829..5a46d1b93c5c74 100644 --- a/command/agent/config/test-fixtures/config-cache-auto_auth-true.hcl +++ b/command/agent/config/test-fixtures/config-cache-auto_auth-true.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-embedded-type.hcl b/command/agent/config/test-fixtures/config-cache-embedded-type.hcl index 4ea525753b6c8a..b09a978c66ae3b 100644 --- a/command/agent/config/test-fixtures/config-cache-embedded-type.hcl +++ b/command/agent/config/test-fixtures/config-cache-embedded-type.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-no-auto_auth.hcl b/command/agent/config/test-fixtures/config-cache-no-auto_auth.hcl index 45c71412bdd5e6..7a2a57f683aecc 100644 --- a/command/agent/config/test-fixtures/config-cache-no-auto_auth.hcl +++ b/command/agent/config/test-fixtures/config-cache-no-auto_auth.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" cache { diff --git a/command/agent/config/test-fixtures/config-cache-no-listeners.hcl b/command/agent/config/test-fixtures/config-cache-no-listeners.hcl index 3e0abfb6b9eb7b..d7176e0aa539ab 100644 --- a/command/agent/config/test-fixtures/config-cache-no-listeners.hcl +++ b/command/agent/config/test-fixtures/config-cache-no-listeners.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-cache-persist-empty-type.hcl b/command/agent/config/test-fixtures/config-cache-persist-empty-type.hcl index f40715e6e418f6..55f1d64801612e 100644 --- a/command/agent/config/test-fixtures/config-cache-persist-empty-type.hcl +++ b/command/agent/config/test-fixtures/config-cache-persist-empty-type.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" cache { diff --git a/command/agent/config/test-fixtures/config-cache-persist-false.hcl b/command/agent/config/test-fixtures/config-cache-persist-false.hcl index 77bb926cffdfc1..5ab7f04499118a 100644 --- a/command/agent/config/test-fixtures/config-cache-persist-false.hcl +++ b/command/agent/config/test-fixtures/config-cache-persist-false.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" cache { diff --git a/command/agent/config/test-fixtures/config-cache.hcl b/command/agent/config/test-fixtures/config-cache.hcl index 87fa5afad86189..05f321a95c4ce9 100644 --- a/command/agent/config/test-fixtures/config-cache.hcl +++ b/command/agent/config/test-fixtures/config-cache.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-consistency-apiproxy.hcl b/command/agent/config/test-fixtures/config-consistency-apiproxy.hcl index c2e662a67206a3..d116964a1b5034 100644 --- a/command/agent/config/test-fixtures/config-consistency-apiproxy.hcl +++ b/command/agent/config/test-fixtures/config-consistency-apiproxy.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - api_proxy { enforce_consistency = "always" when_inconsistent = "retry" diff --git a/command/agent/config/test-fixtures/config-consistency.hcl b/command/agent/config/test-fixtures/config-consistency.hcl index 535181197c6c1f..d57e0557362e16 100644 --- a/command/agent/config/test-fixtures/config-consistency.hcl +++ b/command/agent/config/test-fixtures/config-consistency.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - cache { enforce_consistency = "always" when_inconsistent = "retry" diff --git a/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config1.hcl b/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config1.hcl index c900df6b1aa3fa..849aab288e3242 100644 --- a/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config1.hcl +++ b/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config1.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config2.hcl b/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config2.hcl index 2e942da663818a..e1efa956298d18 100644 --- a/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config2.hcl +++ b/command/agent/config/test-fixtures/config-dir-auto-auth-and-listener/config2.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" listener "tcp" { diff --git a/command/agent/config/test-fixtures/config-dir-cache/config-cache1.hcl b/command/agent/config/test-fixtures/config-dir-cache/config-cache1.hcl index 767cdd9e4a5f2e..8dab7ed913ae3c 100644 --- a/command/agent/config/test-fixtures/config-dir-cache/config-cache1.hcl +++ b/command/agent/config/test-fixtures/config-dir-cache/config-cache1.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-dir-cache/config-cache2.hcl b/command/agent/config/test-fixtures/config-dir-cache/config-cache2.hcl index f4d0f47f961202..f8398188c4f3ca 100644 --- a/command/agent/config/test-fixtures/config-dir-cache/config-cache2.hcl +++ b/command/agent/config/test-fixtures/config-dir-cache/config-cache2.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - cache { use_auto_auth_token = true persist = { diff --git a/command/agent/config/test-fixtures/config-dir-vault-block/config1.hcl b/command/agent/config/test-fixtures/config-dir-vault-block/config1.hcl index 18729533f44145..eeec3b8f2c1ac6 100644 --- a/command/agent/config/test-fixtures/config-dir-vault-block/config1.hcl +++ b/command/agent/config/test-fixtures/config-dir-vault-block/config1.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - vault { address = "http://127.0.0.1:1111" ca_cert = "config_ca_cert" diff --git a/command/agent/config/test-fixtures/config-dir-vault-block/config2.hcl b/command/agent/config/test-fixtures/config-dir-vault-block/config2.hcl index c900df6b1aa3fa..849aab288e3242 100644 --- a/command/agent/config/test-fixtures/config-dir-vault-block/config2.hcl +++ b/command/agent/config/test-fixtures/config-dir-vault-block/config2.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-disable-idle-connections-all.hcl b/command/agent/config/test-fixtures/config-disable-idle-connections-all.hcl index f312d420c8e78d..94e8cc827f3ca7 100644 --- a/command/agent/config/test-fixtures/config-disable-idle-connections-all.hcl +++ b/command/agent/config/test-fixtures/config-disable-idle-connections-all.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = ["auto-auth","caching","templating","proxying"] diff --git a/command/agent/config/test-fixtures/config-disable-idle-connections-auto-auth.hcl b/command/agent/config/test-fixtures/config-disable-idle-connections-auto-auth.hcl index abb1756697fa30..1a63b20480d4f7 100644 --- a/command/agent/config/test-fixtures/config-disable-idle-connections-auto-auth.hcl +++ b/command/agent/config/test-fixtures/config-disable-idle-connections-auto-auth.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = ["auto-auth"] diff --git a/command/agent/config/test-fixtures/config-disable-idle-connections-caching.hcl b/command/agent/config/test-fixtures/config-disable-idle-connections-caching.hcl index 95a36e925a58b1..30d0806c033710 100644 --- a/command/agent/config/test-fixtures/config-disable-idle-connections-caching.hcl +++ b/command/agent/config/test-fixtures/config-disable-idle-connections-caching.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = ["caching"] diff --git a/command/agent/config/test-fixtures/config-disable-idle-connections-empty.hcl b/command/agent/config/test-fixtures/config-disable-idle-connections-empty.hcl index 3e490bfbc4c2fc..eb95310cedfff1 100644 --- a/command/agent/config/test-fixtures/config-disable-idle-connections-empty.hcl +++ b/command/agent/config/test-fixtures/config-disable-idle-connections-empty.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = [] diff --git a/command/agent/config/test-fixtures/config-disable-idle-connections-proxying.hcl b/command/agent/config/test-fixtures/config-disable-idle-connections-proxying.hcl index 88da2effcc7182..8c2c6db6740015 100644 --- a/command/agent/config/test-fixtures/config-disable-idle-connections-proxying.hcl +++ b/command/agent/config/test-fixtures/config-disable-idle-connections-proxying.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = ["proxying"] diff --git a/command/agent/config/test-fixtures/config-disable-idle-connections-templating.hcl b/command/agent/config/test-fixtures/config-disable-idle-connections-templating.hcl index 6e51c91a0e4b41..922377fc82a96e 100644 --- a/command/agent/config/test-fixtures/config-disable-idle-connections-templating.hcl +++ b/command/agent/config/test-fixtures/config-disable-idle-connections-templating.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_idle_connections = ["templating"] diff --git a/command/agent/config/test-fixtures/config-disable-keep-alives-all.hcl b/command/agent/config/test-fixtures/config-disable-keep-alives-all.hcl index 8c1c6d58282ab1..6e498f756d3920 100644 --- a/command/agent/config/test-fixtures/config-disable-keep-alives-all.hcl +++ b/command/agent/config/test-fixtures/config-disable-keep-alives-all.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = ["auto-auth","caching","templating","proxying"] diff --git a/command/agent/config/test-fixtures/config-disable-keep-alives-auto-auth.hcl b/command/agent/config/test-fixtures/config-disable-keep-alives-auto-auth.hcl index d77dfb27837854..11393bfb57a600 100644 --- a/command/agent/config/test-fixtures/config-disable-keep-alives-auto-auth.hcl +++ b/command/agent/config/test-fixtures/config-disable-keep-alives-auto-auth.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = ["auto-auth"] diff --git a/command/agent/config/test-fixtures/config-disable-keep-alives-caching.hcl b/command/agent/config/test-fixtures/config-disable-keep-alives-caching.hcl index 386267e3e8014f..5712296924edff 100644 --- a/command/agent/config/test-fixtures/config-disable-keep-alives-caching.hcl +++ b/command/agent/config/test-fixtures/config-disable-keep-alives-caching.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = ["caching"] diff --git a/command/agent/config/test-fixtures/config-disable-keep-alives-empty.hcl b/command/agent/config/test-fixtures/config-disable-keep-alives-empty.hcl index b4239a5261ed55..8cddcebd8f1065 100644 --- a/command/agent/config/test-fixtures/config-disable-keep-alives-empty.hcl +++ b/command/agent/config/test-fixtures/config-disable-keep-alives-empty.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = [] diff --git a/command/agent/config/test-fixtures/config-disable-keep-alives-proxying.hcl b/command/agent/config/test-fixtures/config-disable-keep-alives-proxying.hcl index 8c82a92b6e1579..8363cb58f13215 100644 --- a/command/agent/config/test-fixtures/config-disable-keep-alives-proxying.hcl +++ b/command/agent/config/test-fixtures/config-disable-keep-alives-proxying.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = ["proxying"] diff --git a/command/agent/config/test-fixtures/config-disable-keep-alives-templating.hcl b/command/agent/config/test-fixtures/config-disable-keep-alives-templating.hcl index 01ec09504544d7..d4731cbd90e2ea 100644 --- a/command/agent/config/test-fixtures/config-disable-keep-alives-templating.hcl +++ b/command/agent/config/test-fixtures/config-disable-keep-alives-templating.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" disable_keep_alives = ["templating"] diff --git a/command/agent/config/test-fixtures/config-embedded-type.hcl b/command/agent/config/test-fixtures/config-embedded-type.hcl index 2ce3b401ca7ed1..4e6dc41f46a8ea 100644 --- a/command/agent/config/test-fixtures/config-embedded-type.hcl +++ b/command/agent/config/test-fixtures/config-embedded-type.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" log_file = "/var/log/vault/vault-agent.log" diff --git a/command/agent/config/test-fixtures/config-env-templates-complex.hcl b/command/agent/config/test-fixtures/config-env-templates-complex.hcl deleted file mode 100644 index 639b1288f24c08..00000000000000 --- a/command/agent/config/test-fixtures/config-env-templates-complex.hcl +++ /dev/null @@ -1,36 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/home/username/.vault-token" - } - } -} - -cache {} - -template_config { - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault { - address = "http://localhost:8200" -} - -env_template "FOO_PASSWORD" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.password }}{{ end }}" - error_on_missing_key = false -} -env_template "FOO_USER" { - contents = "{{ with secret \"secret/data/foo\" }}{{ .Data.data.user }}{{ end }}" - error_on_missing_key = false -} - -exec { - command = ["env"] - restart_on_secret_changes = "never" - restart_stop_signal = "SIGINT" -} diff --git a/command/agent/config/test-fixtures/config-env-templates-simple.hcl b/command/agent/config/test-fixtures/config-env-templates-simple.hcl deleted file mode 100644 index 441563b1e9f0af..00000000000000 --- a/command/agent/config/test-fixtures/config-env-templates-simple.hcl +++ /dev/null @@ -1,18 +0,0 @@ -auto_auth { - - method { - type = "token_file" - - config { - token_file_path = "/Users/avean/.vault-token" - } - } -} - -env_template "MY_DATABASE_USER" { - contents = "{{ with secret \"secret/db-secret\" }}{{ .Data.data.user }}{{ end }}" -} - -exec { - command = ["/path/to/my/app", "arg1", "arg2"] -} diff --git a/command/agent/config/test-fixtures/config-env-templates-with-source.hcl b/command/agent/config/test-fixtures/config-env-templates-with-source.hcl deleted file mode 100644 index d51cb5553a9176..00000000000000 --- a/command/agent/config/test-fixtures/config-env-templates-with-source.hcl +++ /dev/null @@ -1,16 +0,0 @@ -auto_auth { - method { - type = "token_file" - config { - token_file_path = "/home/username/.vault-token" - } - } -} - -env_template "MY_PASSWORD" { - source = "/path/on/disk/to/template.ctmpl" -} - -exec { - command = ["/path/to/my/app", "arg1", "arg2"] -} diff --git a/command/agent/config/test-fixtures/config-method-exit-on-err.hcl b/command/agent/config/test-fixtures/config-method-exit-on-err.hcl index bbda08c01b63ae..c52140102f1c8b 100644 --- a/command/agent/config/test-fixtures/config-method-exit-on-err.hcl +++ b/command/agent/config/test-fixtures/config-method-exit-on-err.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-method-initial-backoff.hcl b/command/agent/config/test-fixtures/config-method-initial-backoff.hcl index b166dabde4183d..6b9343aa4ba629 100644 --- a/command/agent/config/test-fixtures/config-method-initial-backoff.hcl +++ b/command/agent/config/test-fixtures/config-method-initial-backoff.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-method-wrapping.hcl b/command/agent/config/test-fixtures/config-method-wrapping.hcl index 8142a19dd80f06..cbafc5a2459377 100644 --- a/command/agent/config/test-fixtures/config-method-wrapping.hcl +++ b/command/agent/config/test-fixtures/config-method-wrapping.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-full-nosink.hcl b/command/agent/config/test-fixtures/config-template-full-nosink.hcl index 579aae1e86ca6c..84edf6f11e3c79 100644 --- a/command/agent/config/test-fixtures/config-template-full-nosink.hcl +++ b/command/agent/config/test-fixtures/config-template-full-nosink.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-full.hcl b/command/agent/config/test-fixtures/config-template-full.hcl index b7641cd66403fc..5e5cbc62cd7b76 100644 --- a/command/agent/config/test-fixtures/config-template-full.hcl +++ b/command/agent/config/test-fixtures/config-template-full.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-many-nosink.hcl b/command/agent/config/test-fixtures/config-template-many-nosink.hcl index 2f8352d1b00086..e04f77da263e2d 100644 --- a/command/agent/config/test-fixtures/config-template-many-nosink.hcl +++ b/command/agent/config/test-fixtures/config-template-many-nosink.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-many.hcl b/command/agent/config/test-fixtures/config-template-many.hcl index 3a3ce77f1f4628..2f6fe7b70b6d9b 100644 --- a/command/agent/config/test-fixtures/config-template-many.hcl +++ b/command/agent/config/test-fixtures/config-template-many.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-min-nosink.hcl b/command/agent/config/test-fixtures/config-template-min-nosink.hcl index 064b7a452f6992..12c5a82c5385cd 100644 --- a/command/agent/config/test-fixtures/config-template-min-nosink.hcl +++ b/command/agent/config/test-fixtures/config-template-min-nosink.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-min.hcl b/command/agent/config/test-fixtures/config-template-min.hcl index 34435da638c5be..5d37dbefbab1e8 100644 --- a/command/agent/config/test-fixtures/config-template-min.hcl +++ b/command/agent/config/test-fixtures/config-template-min.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-template-with-cache.hcl b/command/agent/config/test-fixtures/config-template-with-cache.hcl deleted file mode 100644 index 8f43b8311eeeb7..00000000000000 --- a/command/agent/config/test-fixtures/config-template-with-cache.hcl +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - -pid_file = "./pidfile" - -auto_auth { - method { - type = "aws" - namespace = "/my-namespace" - - config = { - role = "foobar" - } - } -} - -cache {} - -template { - source = "/path/on/disk/to/template.ctmpl" - destination = "/path/on/disk/where/template/will/render.txt" -} diff --git a/command/agent/config/test-fixtures/config-template_config-empty.hcl b/command/agent/config/test-fixtures/config-template_config-empty.hcl index ac22dcc5cd6cf3..a4f5b3a0938f5a 100644 --- a/command/agent/config/test-fixtures/config-template_config-empty.hcl +++ b/command/agent/config/test-fixtures/config-template_config-empty.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - vault { address = "http://127.0.0.1:1111" retry { diff --git a/command/agent/config/test-fixtures/config-template_config.hcl b/command/agent/config/test-fixtures/config-template_config.hcl index b550890018d4af..5da0e2b9127b93 100644 --- a/command/agent/config/test-fixtures/config-template_config.hcl +++ b/command/agent/config/test-fixtures/config-template_config.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - vault { address = "http://127.0.0.1:1111" retry { diff --git a/command/agent/config/test-fixtures/config-vault-retry-empty.hcl b/command/agent/config/test-fixtures/config-vault-retry-empty.hcl index 72c44e1f97bdc6..e7ab4aa0017f20 100644 --- a/command/agent/config/test-fixtures/config-vault-retry-empty.hcl +++ b/command/agent/config/test-fixtures/config-vault-retry-empty.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config-vault-retry.hcl b/command/agent/config/test-fixtures/config-vault-retry.hcl index 5e4ee234304ec0..0c1cfa19b06467 100644 --- a/command/agent/config/test-fixtures/config-vault-retry.hcl +++ b/command/agent/config/test-fixtures/config-vault-retry.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" auto_auth { diff --git a/command/agent/config/test-fixtures/config.hcl b/command/agent/config/test-fixtures/config.hcl index 18ec360309c99f..ecfb88ae05f98d 100644 --- a/command/agent/config/test-fixtures/config.hcl +++ b/command/agent/config/test-fixtures/config.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - pid_file = "./pidfile" log_file = "/var/log/vault/vault-agent.log" diff --git a/command/agent/doc.go b/command/agent/doc.go index e9f0f0b98fe3c9..0786f5c1d39495 100644 --- a/command/agent/doc.go +++ b/command/agent/doc.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - /* Package agent implements a daemon mode of Vault designed to provide helper features like auto-auth, caching, and templating. 
diff --git a/command/agent/exec/exec.go b/command/agent/exec/exec.go deleted file mode 100644 index b22e5eb9302af2..00000000000000 --- a/command/agent/exec/exec.go +++ /dev/null @@ -1,332 +0,0 @@ -package exec - -import ( - "context" - "fmt" - "io" - "os" - "sort" - "sync" - "time" - - "github.com/hashicorp/consul-template/child" - ctconfig "github.com/hashicorp/consul-template/config" - "github.com/hashicorp/consul-template/manager" - "github.com/hashicorp/go-hclog" - "golang.org/x/exp/slices" - - "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/command/agent/internal/ctmanager" - "github.com/hashicorp/vault/helper/useragent" - "github.com/hashicorp/vault/sdk/helper/pointerutil" -) - -type childProcessState uint8 - -const ( - childProcessStateNotStarted childProcessState = iota - childProcessStateRunning - childProcessStateRestarting - childProcessStateStopped -) - -type ServerConfig struct { - Logger hclog.Logger - AgentConfig *config.Config - - Namespace string - - // LogLevel is needed to set the internal Consul Template Runner's log level - // to match the log level of Vault Agent. The internal Runner creates it's own - // logger and can't be set externally or copied from the Template Server. - // - // LogWriter is needed to initialize Consul Template's internal logger to use - // the same io.Writer that Vault Agent itself is using. - LogLevel hclog.Level - LogWriter io.Writer -} - -type Server struct { - // config holds the ServerConfig used to create it. It's passed along in other - // methods - config *ServerConfig - - // runner is the consul-template runner - runner *manager.Runner - - // numberOfTemplates is the count of templates determined by consul-template, - // we keep the value to ensure all templates have been rendered before - // starting the child process - // NOTE: each template may have more than one TemplateConfig, so the numbers may not match up - numberOfTemplates int - - logger hclog.Logger - - childProcess *child.Child - childProcessState childProcessState - childProcessLock sync.Mutex - - // exit channel of the child process - childProcessExitCh chan int - - // lastRenderedEnvVars is the cached value of all environment variables - // rendered by the templating engine; it is used for detecting changes - lastRenderedEnvVars []string -} - -type ProcessExitError struct { - ExitCode int -} - -func (e *ProcessExitError) Error() string { - return fmt.Sprintf("process exited with %d", e.ExitCode) -} - -func NewServer(cfg *ServerConfig) *Server { - server := Server{ - logger: cfg.Logger, - config: cfg, - childProcessState: childProcessStateNotStarted, - childProcessExitCh: make(chan int), - } - - return &server -} - -func (s *Server) Run(ctx context.Context, incomingVaultToken chan string) error { - latestToken := new(string) - s.logger.Info("starting exec server") - defer func() { - s.logger.Info("exec server stopped") - }() - - if len(s.config.AgentConfig.EnvTemplates) == 0 || s.config.AgentConfig.Exec == nil { - s.logger.Info("no env templates or exec config, exiting") - <-ctx.Done() - return nil - } - - managerConfig := ctmanager.ManagerConfig{ - AgentConfig: s.config.AgentConfig, - Namespace: s.config.Namespace, - LogLevel: s.config.LogLevel, - LogWriter: s.config.LogWriter, - } - - runnerConfig, err := ctmanager.NewConfig(managerConfig, s.config.AgentConfig.EnvTemplates) - if err != nil { - return fmt.Errorf("template server failed to generate runner config: %w", err) - } - - // We leave this in "dry" mode, as there are no files to 
render; - // we will get the environment variables rendered contents from the incoming events - s.runner, err = manager.NewRunner(runnerConfig, true) - if err != nil { - return fmt.Errorf("template server failed to create: %w", err) - } - - // prevent the templates from being rendered to stdout in "dry" mode - s.runner.SetOutStream(io.Discard) - - s.numberOfTemplates = len(s.runner.TemplateConfigMapping()) - - // We receive multiple events every staticSecretRenderInterval - // from <-s.runner.TemplateRenderedCh(), one for each secret. Only the last - // event in a batch will contain the latest set of all secrets and the - // corresponding environment variables. This timer will fire after 2 seconds - // unless an event comes in which resets the timer back to 2 seconds. - var debounceTimer *time.Timer - - // capture the errors related to restarting the child process - restartChildProcessErrCh := make(chan error) - - for { - select { - case <-ctx.Done(): - s.runner.Stop() - s.childProcessLock.Lock() - if s.childProcess != nil { - s.childProcess.Stop() - } - s.childProcessState = childProcessStateStopped - s.childProcessLock.Unlock() - return nil - - case token := <-incomingVaultToken: - if token != *latestToken { - s.logger.Info("exec server received new token") - - s.runner.Stop() - *latestToken = token - newTokenConfig := ctconfig.Config{ - Vault: &ctconfig.VaultConfig{ - Token: latestToken, - ClientUserAgent: pointerutil.StringPtr(useragent.AgentTemplatingString()), - }, - } - - // got a new auth token, merge it in with the existing config - runnerConfig = runnerConfig.Merge(&newTokenConfig) - s.runner, err = manager.NewRunner(runnerConfig, true) - if err != nil { - s.logger.Error("template server failed with new Vault token", "error", err) - continue - } - - // prevent the templates from being rendered to stdout in "dry" mode - s.runner.SetOutStream(io.Discard) - - go s.runner.Start() - } - - case err := <-s.runner.ErrCh: - s.logger.Error("template server error", "error", err.Error()) - s.runner.StopImmediately() - - // Return after stopping the runner if exit on retry failure was specified - if s.config.AgentConfig.TemplateConfig != nil && s.config.AgentConfig.TemplateConfig.ExitOnRetryFailure { - return fmt.Errorf("template server: %w", err) - } - - s.runner, err = manager.NewRunner(runnerConfig, true) - if err != nil { - return fmt.Errorf("template server failed to create: %w", err) - } - go s.runner.Start() - - case <-s.runner.TemplateRenderedCh(): - // A template has been rendered, figure out what to do - s.logger.Trace("template rendered") - events := s.runner.RenderEvents() - - // This checks if we've finished rendering the initial set of templates, - // for every consecutive re-render len(events) should equal s.numberOfTemplates - if len(events) < s.numberOfTemplates { - // Not all templates have been rendered yet - continue - } - - // assume the renders are finished, until we find otherwise - doneRendering := true - var renderedEnvVars []string - for _, event := range events { - // This template hasn't been rendered - if event.LastWouldRender.IsZero() { - doneRendering = false - break - } else { - for _, tcfg := range event.TemplateConfigs { - envVar := fmt.Sprintf("%s=%s", *tcfg.MapToEnvironmentVariable, event.Contents) - renderedEnvVars = append(renderedEnvVars, envVar) - } - } - } - if !doneRendering { - continue - } - - // sort the environment variables for a deterministic output and easy comparison - sort.Strings(renderedEnvVars) - - s.logger.Trace("done rendering 
templates") - - // don't restart the process unless a change is detected - if slices.Equal(s.lastRenderedEnvVars, renderedEnvVars) { - continue - } - - s.lastRenderedEnvVars = renderedEnvVars - - s.logger.Debug("detected a change in the environment variables: restarting the child process") - - // if a timer exists, stop it - if debounceTimer != nil { - debounceTimer.Stop() - } - debounceTimer = time.AfterFunc(2*time.Second, func() { - if err := s.restartChildProcess(renderedEnvVars); err != nil { - restartChildProcessErrCh <- fmt.Errorf("unable to restart the child process: %w", err) - } - }) - - case err := <-restartChildProcessErrCh: - // catch the error from restarting - return err - - case exitCode := <-s.childProcessExitCh: - // process exited on its own - return &ProcessExitError{ExitCode: exitCode} - } - } -} - -func (s *Server) restartChildProcess(newEnvVars []string) error { - s.childProcessLock.Lock() - defer s.childProcessLock.Unlock() - - switch s.config.AgentConfig.Exec.RestartOnSecretChanges { - case "always": - if s.childProcessState == childProcessStateRunning { - // process is running, need to kill it first - s.logger.Info("stopping process", "process_id", s.childProcess.Pid()) - s.childProcessState = childProcessStateRestarting - s.childProcess.Stop() - } - case "never": - if s.childProcessState == childProcessStateRunning { - s.logger.Info("detected update, but not restarting process", "process_id", s.childProcess.Pid()) - return nil - } - default: - return fmt.Errorf("invalid value for restart-on-secret-changes: %q", s.config.AgentConfig.Exec.RestartOnSecretChanges) - } - - args, subshell, err := child.CommandPrep(s.config.AgentConfig.Exec.Command) - if err != nil { - return fmt.Errorf("unable to parse command: %w", err) - } - - childInput := &child.NewInput{ - Stdin: os.Stdin, - Stdout: os.Stdout, - Stderr: os.Stderr, - Command: args[0], - Args: args[1:], - Timeout: 0, // let it run forever - Env: append(os.Environ(), newEnvVars...), - ReloadSignal: nil, // can't reload w/ new env vars - KillSignal: s.config.AgentConfig.Exec.RestartStopSignal, - KillTimeout: 30 * time.Second, - Splay: 0, - Setpgid: subshell, - Logger: s.logger.StandardLogger(nil), - } - - proc, err := child.New(childInput) - if err != nil { - return err - } - s.childProcess = proc - - if err := s.childProcess.Start(); err != nil { - return fmt.Errorf("error starting the child process: %w", err) - } - - s.childProcessState = childProcessStateRunning - - // Listen if the child process exits and bubble it up to the main loop. - // - // NOTE: this must be invoked after child.Start() to avoid a potential - // race condition with ExitCh not being initialized. 
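A condensed, standalone sketch of the restart-on-change pattern used in Run above: flatten the rendered secrets to KEY=VALUE pairs, sort them for a deterministic comparison, skip identical sets, and debounce the restart with time.AfterFunc so a burst of render events triggers only one restart. Names and values here are illustrative.

package main

import (
	"fmt"
	"sort"
	"time"

	"golang.org/x/exp/slices"
)

func main() {
	var (
		last          []string
		debounceTimer *time.Timer
	)

	restart := func(env []string) {
		fmt.Println("restarting child with", env)
	}

	onRender := func(rendered map[string]string) {
		// Flatten to KEY=VALUE and sort for a deterministic comparison.
		next := make([]string, 0, len(rendered))
		for k, v := range rendered {
			next = append(next, fmt.Sprintf("%s=%s", k, v))
		}
		sort.Strings(next)

		// No change in the environment: nothing to do.
		if slices.Equal(last, next) {
			return
		}
		last = next

		// Reset the debounce window on every change; only the last change
		// within the window causes a restart.
		if debounceTimer != nil {
			debounceTimer.Stop()
		}
		debounceTimer = time.AfterFunc(2*time.Second, func() { restart(next) })
	}

	onRender(map[string]string{"MY_USER": "app-user", "MY_PASSWORD": "s3cr3t"})
	onRender(map[string]string{"MY_USER": "app-user", "MY_PASSWORD": "s3cr3t-two"})

	time.Sleep(3 * time.Second) // wait for the debounce timer to fire
}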
- go func() { - select { - case exitCode, ok := <-proc.ExitCh(): - // ignore ExitCh channel closures caused by our restarts - if ok { - s.childProcessExitCh <- exitCode - } - } - }() - - return nil -} diff --git a/command/agent/exec/exec_test.go b/command/agent/exec/exec_test.go deleted file mode 100644 index 3c13c34a9f5167..00000000000000 --- a/command/agent/exec/exec_test.go +++ /dev/null @@ -1,379 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/http" - "net/http/httptest" - "os" - "os/exec" - "path/filepath" - "strconv" - "syscall" - "testing" - "time" - - ctconfig "github.com/hashicorp/consul-template/config" - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-retryablehttp" - - "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/sdk/helper/logging" - "github.com/hashicorp/vault/sdk/helper/pointerutil" -) - -func fakeVaultServer(t *testing.T) *httptest.Server { - t.Helper() - - firstRequest := true - - mux := http.NewServeMux() - mux.HandleFunc("/v1/kv/my-app/creds", func(w http.ResponseWriter, r *http.Request) { - // change the password on the second request to re-render the template - var password string - - if firstRequest { - password = "s3cr3t" - } else { - password = "s3cr3t-two" - } - - firstRequest = false - - fmt.Fprintf(w, `{ - "request_id": "8af096e9-518c-7351-eff5-5ba20554b21f", - "lease_id": "", - "renewable": false, - "lease_duration": 0, - "data": { - "data": { - "password": "%s", - "user": "app-user" - }, - "metadata": { - "created_time": "2019-10-07T22:18:44.233247Z", - "deletion_time": "", - "destroyed": false, - "version": 3 - } - }, - "wrap_info": null, - "warnings": null, - "auth": null - }`, - password, - ) - }) - - return httptest.NewServer(mux) -} - -// TestExecServer_Run tests various scenarios of using vault agent as a process -// supervisor. At its core is a sample application referred to as 'test app', -// compiled from ./test-app/main.go. Each test case verifies that the test app -// is started and/or stopped correctly by exec.Server.Run(). There are 3 -// high-level scenarios we want to test for: -// -// 1. test app is started and is injected with environment variables -// 2. test app exits early (either with zero or non-zero extit code) -// 3. 
test app needs to be stopped (and restarted) by exec.Server -func TestExecServer_Run(t *testing.T) { - // we must build a test-app binary since 'go run' does not propagate signals correctly - goBinary, err := exec.LookPath("go") - if err != nil { - t.Fatalf("could not find go binary on path: %s", err) - } - - testAppBinary := filepath.Join(os.TempDir(), "test-app") - - if err := exec.Command(goBinary, "build", "-o", testAppBinary, "./test-app").Run(); err != nil { - t.Fatalf("could not build the test application: %s", err) - } - defer func() { - if err := os.Remove(testAppBinary); err != nil { - t.Fatalf("could not remove %q test application: %s", testAppBinary, err) - } - }() - - testCases := map[string]struct { - // skip this test case - skip bool - skipReason string - - // inputs to the exec server - envTemplates []*ctconfig.TemplateConfig - staticSecretRenderInterval time.Duration - - // test app parameters - testAppArgs []string - testAppStopSignal os.Signal - testAppPort int - - // simulate a shutdown of agent, which, in turn stops the test app - simulateShutdown bool - simulateShutdownWaitDuration time.Duration - - // expected results - expected map[string]string - expectedTestDuration time.Duration - expectedError error - }{ - "ensure_environment_variables_are_injected": { - skip: true, - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }, { - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.password }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_PASSWORD"), - }}, - testAppArgs: []string{"--stop-after", "10s"}, - testAppStopSignal: syscall.SIGTERM, - testAppPort: 34001, - expected: map[string]string{ - "MY_USER": "app-user", - "MY_PASSWORD": "s3cr3t", - }, - expectedTestDuration: 15 * time.Second, - expectedError: nil, - }, - - "password_changes_test_app_should_restart": { - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }, { - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.password }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_PASSWORD"), - }}, - staticSecretRenderInterval: 5 * time.Second, - testAppArgs: []string{"--stop-after", "15s", "--sleep-after-stop-signal", "0s"}, - testAppStopSignal: syscall.SIGTERM, - testAppPort: 34002, - expected: map[string]string{ - "MY_USER": "app-user", - "MY_PASSWORD": "s3cr3t-two", - }, - expectedTestDuration: 15 * time.Second, - expectedError: nil, - }, - - "test_app_exits_early": { - skip: true, - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }}, - testAppArgs: []string{"--stop-after", "1s"}, - testAppStopSignal: syscall.SIGTERM, - testAppPort: 34003, - expectedTestDuration: 15 * time.Second, - expectedError: &ProcessExitError{0}, - }, - - "test_app_exits_early_non_zero": { - skip: true, - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }}, - testAppArgs: []string{"--stop-after", "1s", 
"--exit-code", "5"}, - testAppStopSignal: syscall.SIGTERM, - testAppPort: 34004, - expectedTestDuration: 15 * time.Second, - expectedError: &ProcessExitError{5}, - }, - - "send_sigterm_expect_test_app_exit": { - skip: true, - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }}, - testAppArgs: []string{"--stop-after", "30s", "--sleep-after-stop-signal", "1s"}, - testAppStopSignal: syscall.SIGTERM, - testAppPort: 34005, - simulateShutdown: true, - simulateShutdownWaitDuration: 3 * time.Second, - expectedTestDuration: 15 * time.Second, - expectedError: nil, - }, - - "send_sigusr1_expect_test_app_exit": { - skip: true, - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }}, - testAppArgs: []string{"--stop-after", "30s", "--sleep-after-stop-signal", "1s", "--use-sigusr1"}, - testAppStopSignal: syscall.SIGUSR1, - testAppPort: 34006, - simulateShutdown: true, - simulateShutdownWaitDuration: 3 * time.Second, - expectedTestDuration: 15 * time.Second, - expectedError: nil, - }, - - "test_app_ignores_stop_signal": { - skip: true, - skipReason: "This test currently fails with 'go test -race' (see hashicorp/consul-template/issues/1753).", - envTemplates: []*ctconfig.TemplateConfig{{ - Contents: pointerutil.StringPtr(`{{ with secret "kv/my-app/creds" }}{{ .Data.data.user }}{{ end }}`), - MapToEnvironmentVariable: pointerutil.StringPtr("MY_USER"), - }}, - testAppArgs: []string{"--stop-after", "60s", "--sleep-after-stop-signal", "60s"}, - testAppStopSignal: syscall.SIGTERM, - testAppPort: 34007, - simulateShutdown: true, - simulateShutdownWaitDuration: 32 * time.Second, // the test app should be stopped immediately after 30s - expectedTestDuration: 45 * time.Second, - expectedError: nil, - }, - } - - for name, testCase := range testCases { - t.Run(name, func(t *testing.T) { - if testCase.skip { - t.Skip(testCase.skipReason) - } - - t.Logf("test case %s: begin", name) - defer t.Logf("test case %s: end", name) - - fakeVault := fakeVaultServer(t) - defer fakeVault.Close() - - ctx, cancelContextFunc := context.WithTimeout(context.Background(), testCase.expectedTestDuration) - defer cancelContextFunc() - - testAppCommand := []string{ - testAppBinary, - "--port", - strconv.Itoa(testCase.testAppPort), - } - - execServer := NewServer(&ServerConfig{ - Logger: logging.NewVaultLogger(hclog.Trace), - AgentConfig: &config.Config{ - Vault: &config.Vault{ - Address: fakeVault.URL, - Retry: &config.Retry{ - NumRetries: 3, - }, - }, - Exec: &config.ExecConfig{ - RestartOnSecretChanges: "always", - Command: append(testAppCommand, testCase.testAppArgs...), - RestartStopSignal: testCase.testAppStopSignal, - }, - EnvTemplates: testCase.envTemplates, - TemplateConfig: &config.TemplateConfig{ - ExitOnRetryFailure: true, - StaticSecretRenderInt: testCase.staticSecretRenderInterval, - }, - }, - LogLevel: hclog.Trace, - LogWriter: hclog.DefaultOutput, - }) - - // start the exec server - var ( - execServerErrCh = make(chan error) - execServerTokenCh = make(chan string, 1) - ) - go func() { - execServerErrCh <- execServer.Run(ctx, execServerTokenCh) - }() - - // send a dummy token to kick off the server - execServerTokenCh <- "my-token" - - // ensure the test app is running after 3 seconds - var ( - testAppAddr = 
fmt.Sprintf("http://localhost:%d", testCase.testAppPort) - testAppStartedCh = make(chan error) - ) - if testCase.expectedError == nil { - time.AfterFunc(500*time.Millisecond, func() { - _, err := retryablehttp.Head(testAppAddr) - testAppStartedCh <- err - }) - } - - select { - case <-ctx.Done(): - t.Fatal("timeout reached before templates were rendered") - - case err := <-execServerErrCh: - if testCase.expectedError == nil && err != nil { - t.Fatalf("exec server did not expect an error, got: %v", err) - } - - if errors.Is(err, testCase.expectedError) { - t.Fatalf("exec server expected error %v; got %v", testCase.expectedError, err) - } - - t.Log("exec server exited without an error") - - return - - case err := <-testAppStartedCh: - if testCase.expectedError == nil && err != nil { - t.Fatalf("test app could not be started") - } - - t.Log("test app started successfully") - } - - // expect the test app to restart after staticSecretRenderInterval + debounce timer due to a password change - if testCase.staticSecretRenderInterval != 0 { - t.Logf("sleeping for %v to wait for application restart", testCase.staticSecretRenderInterval+5*time.Second) - time.Sleep(testCase.staticSecretRenderInterval + 5*time.Second) - } - - // simulate a shutdown of agent, which, in turn stops the test app - if testCase.simulateShutdown { - cancelContextFunc() - - time.Sleep(testCase.simulateShutdownWaitDuration) - - // check if the test app is still alive - if _, err := http.Head(testAppAddr); err == nil { - t.Fatalf("the test app is still alive %v after a simulated shutdown!", testCase.simulateShutdownWaitDuration) - } - - return - } - - // verify the environment variables - t.Logf("verifying test-app's environment variables") - - resp, err := retryablehttp.Get(testAppAddr) - if err != nil { - t.Fatalf("error making request to the test app: %s", err) - } - defer resp.Body.Close() - - decoder := json.NewDecoder(resp.Body) - var response struct { - EnvironmentVariables map[string]string `json:"environment_variables"` - ProcessID int `json:"process_id"` - } - if err := decoder.Decode(&response); err != nil { - t.Fatalf("unable to parse response from test app: %s", err) - } - - for key, expectedValue := range testCase.expected { - actualValue, ok := response.EnvironmentVariables[key] - if !ok { - t.Fatalf("expected the test app to return %q environment variable", key) - } - if expectedValue != actualValue { - t.Fatalf("expected environment variable %s to have a value of %q but it has a value of %q", key, expectedValue, actualValue) - } - } - }) - } -} diff --git a/command/agent/exec/test-app/main.go b/command/agent/exec/test-app/main.go deleted file mode 100644 index 311ac7eb657b60..00000000000000 --- a/command/agent/exec/test-app/main.go +++ /dev/null @@ -1,150 +0,0 @@ -package main - -// This is a test application that is used by TestExecServer_Run to verify -// the behavior of vault agent running as a process supervisor. -// -// The app will automatically exit after 1 minute or the --stop-after interval, -// whichever comes first. It also can serve its loaded environment variables on -// the given --port. This app will also return the given --exit-code and -// terminate on SIGTERM unless --use-sigusr1 is specified. 
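A compact sketch of the shutdown shape described in the comment above, assuming placeholder port and durations: stop on SIGTERM or after a fixed deadline, whichever comes first, then shut the HTTP server down gracefully.

package main

import (
	"context"
	"errors"
	"log"
	"net/http"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	// Exit on SIGTERM...
	sigCtx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM)
	defer stop()

	// ...or after a deadline, whichever happens first.
	ctx, cancel := context.WithTimeout(sigCtx, 60*time.Second)
	defer cancel()

	srv := &http.Server{Addr: ":34000", Handler: http.DefaultServeMux}

	go func() {
		<-ctx.Done() // signal received or deadline reached
		if err := srv.Shutdown(context.Background()); err != nil {
			log.Printf("shutdown error: %v", err)
		}
	}()

	if err := srv.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) {
		log.Fatalf("server error: %v", err)
	}
}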
- -import ( - "bytes" - "context" - "encoding/json" - "errors" - "flag" - "fmt" - "log" - "net/http" - "os" - "os/signal" - "strings" - "syscall" - "time" -) - -var ( - port uint - ignoreStopSignal bool - sleepAfterStopSignal time.Duration - useSigusr1StopSignal bool - stopAfter time.Duration - exitCode int -) - -func init() { - flag.UintVar(&port, "port", 34000, "port to run the test app on") - flag.DurationVar(&sleepAfterStopSignal, "sleep-after-stop-signal", 1*time.Second, "time to sleep after getting the signal before exiting") - flag.BoolVar(&useSigusr1StopSignal, "use-sigusr1", false, "use SIGUSR1 as the stop signal, instead of the default SIGTERM") - flag.DurationVar(&stopAfter, "stop-after", 0, "stop the process after duration (overrides all other flags if set)") - flag.IntVar(&exitCode, "exit-code", 0, "exit code to return when this script exits") -} - -type Response struct { - EnvironmentVariables map[string]string `json:"environment_variables"` - ProcessID int `json:"process_id"` -} - -func newResponse() Response { - respEnv := make(map[string]string, len(os.Environ())) - for _, envVar := range os.Environ() { - tokens := strings.Split(envVar, "=") - respEnv[tokens[0]] = tokens[1] - } - - return Response{ - EnvironmentVariables: respEnv, - ProcessID: os.Getpid(), - } -} - -func handler(w http.ResponseWriter, r *http.Request) { - var buf bytes.Buffer - encoder := json.NewEncoder(&buf) - if r.URL.Query().Get("pretty") == "1" { - encoder.SetIndent("", " ") - } - if err := encoder.Encode(newResponse()); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - _, _ = w.Write(buf.Bytes()) -} - -func main() { - logger := log.New(os.Stderr, "test-app: ", log.LstdFlags) - - if err := run(logger); err != nil { - log.Fatalf("error: %v\n", err) - } - - logger.Printf("exit code: %d\n", exitCode) - - os.Exit(exitCode) -} - -func run(logger *log.Logger) error { - /* */ logger.Println("run: started") - defer logger.Println("run: done") - - ctx, cancelContextFunc := context.WithTimeout(context.Background(), 60*time.Second) - defer cancelContextFunc() - - flag.Parse() - - server := http.Server{ - Addr: fmt.Sprintf(":%d", port), - Handler: http.HandlerFunc(handler), - ReadTimeout: 20 * time.Second, - WriteTimeout: 20 * time.Second, - IdleTimeout: 20 * time.Second, - } - - doneCh := make(chan struct{}) - - go func() { - defer close(doneCh) - - stopSignal := make(chan os.Signal, 1) - if useSigusr1StopSignal { - signal.Notify(stopSignal, syscall.SIGUSR1) - } else { - signal.Notify(stopSignal, syscall.SIGTERM) - } - - select { - case <-ctx.Done(): - logger.Println("context done: exiting") - - case s := <-stopSignal: - logger.Printf("signal %q: received\n", s) - - if sleepAfterStopSignal > 0 { - logger.Printf("signal %q: sleeping for %v simulate cleanup\n", s, sleepAfterStopSignal) - time.Sleep(sleepAfterStopSignal) - } - - case <-time.After(stopAfter): - logger.Printf("stopping after: %v\n", stopAfter) - } - - if err := server.Shutdown(context.Background()); err != nil { - log.Printf("server shutdown error: %v", err) - } - }() - - logger.Printf("server %s: started\n", server.Addr) - - if err := server.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { - return fmt.Errorf("could not start the server: %v", err) - } - - logger.Printf("server %s: done\n", server.Addr) - - <-doneCh - - return nil -} diff --git a/command/agent/internal/ctmanager/runner_config.go 
b/command/agent/internal/ctmanager/runner_config.go deleted file mode 100644 index 90974981771e8a..00000000000000 --- a/command/agent/internal/ctmanager/runner_config.go +++ /dev/null @@ -1,149 +0,0 @@ -package ctmanager - -import ( - "fmt" - "io" - "strings" - - ctconfig "github.com/hashicorp/consul-template/config" - ctlogging "github.com/hashicorp/consul-template/logging" - "github.com/hashicorp/go-hclog" - - "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/sdk/helper/pointerutil" -) - -type ManagerConfig struct { - AgentConfig *config.Config - Namespace string - LogLevel hclog.Level - LogWriter io.Writer -} - -// NewConfig returns a consul-template runner configuration, setting the -// Vault and Consul configurations based on the clients configs. -func NewConfig(mc ManagerConfig, templates ctconfig.TemplateConfigs) (*ctconfig.Config, error) { - conf := ctconfig.DefaultConfig() - conf.Templates = templates.Copy() - - // Setup the Vault config - // Always set these to ensure nothing is picked up from the environment - conf.Vault.RenewToken = pointerutil.BoolPtr(false) - conf.Vault.Token = pointerutil.StringPtr("") - conf.Vault.Address = &mc.AgentConfig.Vault.Address - - if mc.Namespace != "" { - conf.Vault.Namespace = &mc.Namespace - } - - if mc.AgentConfig.TemplateConfig != nil && mc.AgentConfig.TemplateConfig.StaticSecretRenderInt != 0 { - conf.Vault.DefaultLeaseDuration = &mc.AgentConfig.TemplateConfig.StaticSecretRenderInt - } - - if mc.AgentConfig.DisableIdleConnsTemplating { - idleConns := -1 - conf.Vault.Transport.MaxIdleConns = &idleConns - } - - if mc.AgentConfig.DisableKeepAlivesTemplating { - conf.Vault.Transport.DisableKeepAlives = pointerutil.BoolPtr(true) - } - - conf.Vault.SSL = &ctconfig.SSLConfig{ - Enabled: pointerutil.BoolPtr(false), - Verify: pointerutil.BoolPtr(false), - Cert: pointerutil.StringPtr(""), - Key: pointerutil.StringPtr(""), - CaCert: pointerutil.StringPtr(""), - CaPath: pointerutil.StringPtr(""), - ServerName: pointerutil.StringPtr(""), - } - - // If Vault.Retry isn't specified, use the default of 12 retries. - // This retry value will be respected regardless of if we use the cache. - attempts := ctconfig.DefaultRetryAttempts - if mc.AgentConfig.Vault != nil && mc.AgentConfig.Vault.Retry != nil { - attempts = mc.AgentConfig.Vault.Retry.NumRetries - } - - // Use the cache if available or fallback to the Vault server values. - if mc.AgentConfig.Cache != nil { - if mc.AgentConfig.Cache.InProcDialer == nil { - return nil, fmt.Errorf("missing in-process dialer configuration") - } - if conf.Vault.Transport == nil { - conf.Vault.Transport = &ctconfig.TransportConfig{} - } - conf.Vault.Transport.CustomDialer = mc.AgentConfig.Cache.InProcDialer - // The in-process dialer ignores the address passed in, but we're still - // setting it here to override the setting at the top of this function, - // and to prevent the vault/http client from defaulting to https. 
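A standalone illustration of the custom-dialer idea in the comment above, using only net/http: the dialer ignores the address derived from the request URL and always dials one fixed local endpoint, so the configured address only matters for selecting the plain-http scheme. The socket path is a placeholder.

package main

import (
	"context"
	"fmt"
	"net"
	"net/http"
)

func main() {
	client := &http.Client{
		Transport: &http.Transport{
			// addr (derived from the request URL) is deliberately ignored.
			DialContext: func(ctx context.Context, network, addr string) (net.Conn, error) {
				return (&net.Dialer{}).DialContext(ctx, "unix", "/tmp/agent.sock")
			},
		},
	}

	// The host here is never dialed; only the "http" scheme is significant.
	resp, err := client.Get("http://127.0.0.1:8200/v1/sys/health")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}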
- conf.Vault.Address = pointerutil.StringPtr("http://127.0.0.1:8200") - } else if strings.HasPrefix(mc.AgentConfig.Vault.Address, "https") || mc.AgentConfig.Vault.CACert != "" { - skipVerify := mc.AgentConfig.Vault.TLSSkipVerify - verify := !skipVerify - conf.Vault.SSL = &ctconfig.SSLConfig{ - Enabled: pointerutil.BoolPtr(true), - Verify: &verify, - Cert: &mc.AgentConfig.Vault.ClientCert, - Key: &mc.AgentConfig.Vault.ClientKey, - CaCert: &mc.AgentConfig.Vault.CACert, - CaPath: &mc.AgentConfig.Vault.CAPath, - ServerName: &mc.AgentConfig.Vault.TLSServerName, - } - } - enabled := attempts > 0 - conf.Vault.Retry = &ctconfig.RetryConfig{ - Attempts: &attempts, - Enabled: &enabled, - } - - // Sync Consul Template's retry with user set auto-auth initial backoff value. - // This is helpful if Auto Auth cannot get a new token and CT is trying to fetch - // secrets. - if mc.AgentConfig.AutoAuth != nil && mc.AgentConfig.AutoAuth.Method != nil { - if mc.AgentConfig.AutoAuth.Method.MinBackoff > 0 { - conf.Vault.Retry.Backoff = &mc.AgentConfig.AutoAuth.Method.MinBackoff - } - - if mc.AgentConfig.AutoAuth.Method.MaxBackoff > 0 { - conf.Vault.Retry.MaxBackoff = &mc.AgentConfig.AutoAuth.Method.MaxBackoff - } - } - - conf.Finalize() - - // setup log level from TemplateServer config - conf.LogLevel = logLevelToStringPtr(mc.LogLevel) - - if err := ctlogging.Setup(&ctlogging.Config{ - Level: *conf.LogLevel, - Writer: mc.LogWriter, - }); err != nil { - return nil, err - } - return conf, nil -} - -// logLevelToString converts a go-hclog level to a matching, uppercase string -// value. It's used to convert Vault Agent's hclog level to a string version -// suitable for use in Consul Template's runner configuration input. -func logLevelToStringPtr(level hclog.Level) *string { - // consul template's default level is WARN, but Vault Agent's default is INFO, - // so we use that for the Runner's default. - var levelStr string - - switch level { - case hclog.Trace: - levelStr = "TRACE" - case hclog.Debug: - levelStr = "DEBUG" - case hclog.Warn: - levelStr = "WARN" - case hclog.Error: - levelStr = "ERR" - default: - levelStr = "INFO" - } - return pointerutil.StringPtr(levelStr) -} diff --git a/command/agent/jwt_end_to_end_test.go b/command/agent/jwt_end_to_end_test.go index 4739a65c314a4c..c2d74d9f37dc0d 100644 --- a/command/agent/jwt_end_to_end_test.go +++ b/command/agent/jwt_end_to_end_test.go @@ -1,12 +1,9 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package agent import ( "context" "encoding/json" - "fmt" + "io/ioutil" "os" "testing" "time" @@ -14,10 +11,10 @@ import ( hclog "github.com/hashicorp/go-hclog" vaultjwt "github.com/hashicorp/vault-plugin-auth-jwt" "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentjwt "github.com/hashicorp/vault/command/agentproxyshared/auth/jwt" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + agentjwt "github.com/hashicorp/vault/command/agent/auth/jwt" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" "github.com/hashicorp/vault/helper/dhutil" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/jsonutil" @@ -27,32 +24,11 @@ import ( ) func TestJWTEndToEnd(t *testing.T) { - t.Parallel() - testCases := []struct { - ahWrapping bool - useSymlink bool - removeJWTAfterReading bool - }{ - {false, false, false}, - {true, false, false}, - {false, true, false}, - {true, true, false}, - {false, false, true}, - {true, false, true}, - {false, true, true}, - {true, true, true}, - } - - for _, tc := range testCases { - tc := tc // capture range variable - t.Run(fmt.Sprintf("ahWrapping=%v, useSymlink=%v, removeJWTAfterReading=%v", tc.ahWrapping, tc.useSymlink, tc.removeJWTAfterReading), func(t *testing.T) { - t.Parallel() - testJWTEndToEnd(t, tc.ahWrapping, tc.useSymlink, tc.removeJWTAfterReading) - }) - } + testJWTEndToEnd(t, false) + testJWTEndToEnd(t, true) } -func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading bool) { +func testJWTEndToEnd(t *testing.T, ahWrapping bool) { logger := logging.NewVaultLogger(hclog.Trace) coreConfig := &vault.CoreConfig{ Logger: logger, @@ -107,24 +83,16 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading // We close these right away because we're just basically testing // permissions and finding a usable file name - inf, err := os.CreateTemp("", "auth.jwt.test.") + inf, err := ioutil.TempFile("", "auth.jwt.test.") if err != nil { t.Fatal(err) } in := inf.Name() inf.Close() os.Remove(in) - symlink, err := os.CreateTemp("", "auth.jwt.symlink.test.") - if err != nil { - t.Fatal(err) - } - symlinkName := symlink.Name() - symlink.Close() - os.Remove(symlinkName) - os.Symlink(in, symlinkName) t.Logf("input: %s", in) - ouf, err := os.CreateTemp("", "auth.tokensink.test.") + ouf, err := ioutil.TempFile("", "auth.tokensink.test.") if err != nil { t.Fatal(err) } @@ -133,7 +101,7 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading os.Remove(out) t.Logf("output: %s", out) - dhpathf, err := os.CreateTemp("", "auth.dhpath.test.") + dhpathf, err := ioutil.TempFile("", "auth.dhpath.test.") if err != nil { t.Fatal(err) } @@ -148,7 +116,7 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading if err != nil { t.Fatal(err) } - if err := os.WriteFile(dhpath, mPubKey, 0o600); err != nil { + if err := ioutil.WriteFile(dhpath, mPubKey, 0o600); err != nil { t.Fatal(err) } else { logger.Trace("wrote dh param file", "path", dhpath) @@ -156,21 +124,12 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - var fileNameToUseAsPath string - if useSymlink { - fileNameToUseAsPath = symlinkName - } 
else { - fileNameToUseAsPath = in - } am, err := agentjwt.NewJWTAuthMethod(&auth.AuthConfig{ Logger: logger.Named("auth.jwt"), MountPath: "auth/jwt", Config: map[string]interface{}{ - "path": fileNameToUseAsPath, - "role": "test", - "remove_jwt_after_reading": removeJWTAfterReading, - "remove_jwt_follows_symlinks": true, - "jwt_read_period": "0.5s", + "path": in, + "role": "test", }, }) if err != nil { @@ -266,8 +225,7 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading // Get a token jwtToken, _ := GetTestJWT(t) - - if err := os.WriteFile(in, []byte(jwtToken), 0o600); err != nil { + if err := ioutil.WriteFile(in, []byte(jwtToken), 0o600); err != nil { t.Fatal(err) } else { logger.Trace("wrote test jwt", "path", in) @@ -279,29 +237,13 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading if time.Now().After(timeout) { t.Fatal("did not find a written token after timeout") } - val, err := os.ReadFile(out) + val, err := ioutil.ReadFile(out) if err == nil { os.Remove(out) if len(val) == 0 { t.Fatal("written token was empty") } - // First, ensure JWT has been removed - if removeJWTAfterReading { - _, err = os.Stat(in) - if err == nil { - t.Fatal("no error returned from stat, indicating the jwt is still present") - } - if !os.IsNotExist(err) { - t.Fatalf("unexpected error: %v", err) - } - } else { - _, err := os.Stat(in) - if err != nil { - t.Fatal("JWT file removed despite removeJWTAfterReading being set to false") - } - } - // First decrypt it resp := new(dhutil.Envelope) if err := jsonutil.DecodeJSON(val, resp); err != nil { @@ -394,7 +336,7 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading // Get another token to test the backend pushing the need to authenticate // to the handler jwtToken, _ = GetTestJWT(t) - if err := os.WriteFile(in, []byte(jwtToken), 0o600); err != nil { + if err := ioutil.WriteFile(in, []byte(jwtToken), 0o600); err != nil { t.Fatal(err) } diff --git a/command/agent/oci_end_to_end_test.go b/command/agent/oci_end_to_end_test.go deleted file mode 100644 index 2349f09abe8d1a..00000000000000 --- a/command/agent/oci_end_to_end_test.go +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package agent - -import ( - "context" - "io/ioutil" - "os" - "testing" - "time" - - hclog "github.com/hashicorp/go-hclog" - vaultoci "github.com/hashicorp/vault-plugin-auth-oci" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - agentoci "github.com/hashicorp/vault/command/agentproxyshared/auth/oci" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" - "github.com/hashicorp/vault/helper/testhelpers" - vaulthttp "github.com/hashicorp/vault/http" - "github.com/hashicorp/vault/sdk/helper/logging" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/vault" -) - -const ( - envVarOCITestTenancyOCID = "OCI_TEST_TENANCY_OCID" - envVarOCITestUserOCID = "OCI_TEST_USER_OCID" - envVarOCITestFingerprint = "OCI_TEST_FINGERPRINT" - envVarOCITestPrivateKeyPath = "OCI_TEST_PRIVATE_KEY_PATH" - envVAROCITestOCIDList = "OCI_TEST_OCID_LIST" - - // The OCI SDK doesn't export its standard env vars so they're captured here. - // These are used for the duration of the test to make sure the agent is able to - // pick up creds from the env. - // - // To run this test, do not set these. Only the above ones need to be set. 
- envVarOCITenancyOCID = "OCI_tenancy_ocid" - envVarOCIUserOCID = "OCI_user_ocid" - envVarOCIFingerprint = "OCI_fingerprint" - envVarOCIPrivateKeyPath = "OCI_private_key_path" -) - -func TestOCIEndToEnd(t *testing.T) { - if !runAcceptanceTests { - t.SkipNow() - } - - // Ensure each cred is populated. - credNames := []string{ - envVarOCITestTenancyOCID, - envVarOCITestUserOCID, - envVarOCITestFingerprint, - envVarOCITestPrivateKeyPath, - envVAROCITestOCIDList, - } - testhelpers.SkipUnlessEnvVarsSet(t, credNames) - - logger := logging.NewVaultLogger(hclog.Trace) - coreConfig := &vault.CoreConfig{ - Logger: logger, - CredentialBackends: map[string]logical.Factory{ - "oci": vaultoci.Factory, - }, - } - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - defer cluster.Cleanup() - - vault.TestWaitActive(t, cluster.Cores[0].Core) - client := cluster.Cores[0].Client - - // Setup Vault - if err := client.Sys().EnableAuthWithOptions("oci", &api.EnableAuthOptions{ - Type: "oci", - }); err != nil { - t.Fatal(err) - } - - if _, err := client.Logical().Write("auth/oci/config", map[string]interface{}{ - "home_tenancy_id": os.Getenv(envVarOCITestTenancyOCID), - }); err != nil { - t.Fatal(err) - } - - if _, err := client.Logical().Write("auth/oci/role/test", map[string]interface{}{ - "ocid_list": os.Getenv(envVAROCITestOCIDList), - }); err != nil { - t.Fatal(err) - } - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - - // We're going to feed oci auth creds via env variables. - if err := setOCIEnvCreds(); err != nil { - t.Fatal(err) - } - defer func() { - if err := unsetOCIEnvCreds(); err != nil { - t.Fatal(err) - } - }() - - vaultAddr := "http://" + cluster.Cores[0].Listeners[0].Addr().String() - - am, err := agentoci.NewOCIAuthMethod(&auth.AuthConfig{ - Logger: logger.Named("auth.oci"), - MountPath: "auth/oci", - Config: map[string]interface{}{ - "type": "apikey", - "role": "test", - }, - }, vaultAddr) - if err != nil { - t.Fatal(err) - } - - ahConfig := &auth.AuthHandlerConfig{ - Logger: logger.Named("auth.handler"), - Client: client, - } - - ah := auth.NewAuthHandler(ahConfig) - errCh := make(chan error) - go func() { - errCh <- ah.Run(ctx, am) - }() - defer func() { - select { - case <-ctx.Done(): - case err := <-errCh: - if err != nil { - t.Fatal(err) - } - } - }() - - tmpFile, err := ioutil.TempFile("", "auth.tokensink.test.") - if err != nil { - t.Fatal(err) - } - tokenSinkFileName := tmpFile.Name() - tmpFile.Close() - os.Remove(tokenSinkFileName) - t.Logf("output: %s", tokenSinkFileName) - - config := &sink.SinkConfig{ - Logger: logger.Named("sink.file"), - Config: map[string]interface{}{ - "path": tokenSinkFileName, - }, - WrapTTL: 10 * time.Second, - } - - fs, err := file.NewFileSink(config) - if err != nil { - t.Fatal(err) - } - config.Sink = fs - - ss := sink.NewSinkServer(&sink.SinkServerConfig{ - Logger: logger.Named("sink.server"), - Client: client, - }) - go func() { - errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}) - }() - defer func() { - select { - case <-ctx.Done(): - case err := <-errCh: - if err != nil { - t.Fatal(err) - } - } - }() - - // This has to be after the other defers so it happens first. It allows - // successful test runs to immediately cancel all of the runner goroutines - // and unblock any of the blocking defer calls by the runner's DoneCh that - // comes before this and avoid successful tests from taking the entire - // timeout duration. 
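Deferred calls run last-in, first-out, which is why cancel is registered after the other defers below: it runs first on return and unblocks the deferred error checks waiting on the runner goroutines. A tiny illustration:

package main

import "fmt"

func main() {
	defer fmt.Println("registered first, runs last")
	defer fmt.Println("registered second, runs second")
	defer fmt.Println("registered last, runs first") // the position of `defer cancel()`
	fmt.Println("function body")
}

// Output:
// function body
// registered last, runs first
// registered second, runs second
// registered first, runs last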
- defer cancel() - - if stat, err := os.Lstat(tokenSinkFileName); err == nil { - t.Fatalf("expected err but got %s", stat) - } else if !os.IsNotExist(err) { - t.Fatal("expected notexist err") - } - - // Wait 2 seconds for the env variables to be detected and an auth to be generated. - time.Sleep(time.Second * 2) - - token, err := readToken(tokenSinkFileName) - if err != nil { - t.Fatal(err) - } - - if token.Token == "" { - t.Fatal("expected token but didn't receive it") - } -} - -func setOCIEnvCreds() error { - if err := os.Setenv(envVarOCITenancyOCID, os.Getenv(envVarOCITestTenancyOCID)); err != nil { - return err - } - if err := os.Setenv(envVarOCIUserOCID, os.Getenv(envVarOCITestUserOCID)); err != nil { - return err - } - if err := os.Setenv(envVarOCIFingerprint, os.Getenv(envVarOCITestFingerprint)); err != nil { - return err - } - return os.Setenv(envVarOCIPrivateKeyPath, os.Getenv(envVarOCITestPrivateKeyPath)) -} - -func unsetOCIEnvCreds() error { - if err := os.Unsetenv(envVarOCITenancyOCID); err != nil { - return err - } - if err := os.Unsetenv(envVarOCIUserOCID); err != nil { - return err - } - if err := os.Unsetenv(envVarOCIFingerprint); err != nil { - return err - } - return os.Unsetenv(envVarOCIPrivateKeyPath) -} diff --git a/command/agentproxyshared/sink/file/file_sink.go b/command/agent/sink/file/file_sink.go similarity index 96% rename from command/agentproxyshared/sink/file/file_sink.go rename to command/agent/sink/file/file_sink.go index c25d99169aa018..f2faf5641797dc 100644 --- a/command/agentproxyshared/sink/file/file_sink.go +++ b/command/agent/sink/file/file_sink.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package file import ( @@ -12,7 +9,7 @@ import ( hclog "github.com/hashicorp/go-hclog" uuid "github.com/hashicorp/go-uuid" - "github.com/hashicorp/vault/command/agentproxyshared/sink" + "github.com/hashicorp/vault/command/agent/sink" ) // fileSink is a Sink implementation that writes a token to a file diff --git a/command/agentproxyshared/sink/file/file_sink_test.go b/command/agent/sink/file/file_sink_test.go similarity index 95% rename from command/agentproxyshared/sink/file/file_sink_test.go rename to command/agent/sink/file/file_sink_test.go index 95db8df19b72be..9749522b49310a 100644 --- a/command/agentproxyshared/sink/file/file_sink_test.go +++ b/command/agent/sink/file/file_sink_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package file import ( @@ -12,7 +9,7 @@ import ( hclog "github.com/hashicorp/go-hclog" uuid "github.com/hashicorp/go-uuid" - "github.com/hashicorp/vault/command/agentproxyshared/sink" + "github.com/hashicorp/vault/command/agent/sink" "github.com/hashicorp/vault/sdk/helper/logging" ) diff --git a/command/agentproxyshared/sink/file/sink_test.go b/command/agent/sink/file/sink_test.go similarity index 95% rename from command/agentproxyshared/sink/file/sink_test.go rename to command/agent/sink/file/sink_test.go index de074003bfd372..839340f0c88d96 100644 --- a/command/agentproxyshared/sink/file/sink_test.go +++ b/command/agent/sink/file/sink_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package file import ( @@ -15,7 +12,7 @@ import ( hclog "github.com/hashicorp/go-hclog" uuid "github.com/hashicorp/go-uuid" - "github.com/hashicorp/vault/command/agentproxyshared/sink" + "github.com/hashicorp/vault/command/agent/sink" "github.com/hashicorp/vault/sdk/helper/logging" ) diff --git a/command/agentproxyshared/sink/inmem/inmem_sink.go b/command/agent/sink/inmem/inmem_sink.go similarity index 81% rename from command/agentproxyshared/sink/inmem/inmem_sink.go rename to command/agent/sink/inmem/inmem_sink.go index e5804d884bad49..2dfa09115ca739 100644 --- a/command/agentproxyshared/sink/inmem/inmem_sink.go +++ b/command/agent/sink/inmem/inmem_sink.go @@ -1,14 +1,11 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( "errors" hclog "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/cache" - "github.com/hashicorp/vault/command/agentproxyshared/sink" + "github.com/hashicorp/vault/command/agent/cache" + "github.com/hashicorp/vault/command/agent/sink" "go.uber.org/atomic" ) diff --git a/command/agentproxyshared/sink/mock/mock_sink.go b/command/agent/sink/mock/mock_sink.go similarity index 68% rename from command/agentproxyshared/sink/mock/mock_sink.go rename to command/agent/sink/mock/mock_sink.go index c39baf9c8b28f6..fb4720cc17ab2b 100644 --- a/command/agentproxyshared/sink/mock/mock_sink.go +++ b/command/agent/sink/mock/mock_sink.go @@ -1,10 +1,7 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( - "github.com/hashicorp/vault/command/agentproxyshared/sink" + "github.com/hashicorp/vault/command/agent/sink" ) type mockSink struct { diff --git a/command/agentproxyshared/sink/sink.go b/command/agent/sink/sink.go similarity index 98% rename from command/agentproxyshared/sink/sink.go rename to command/agent/sink/sink.go index 2b64c1762781cd..3301421be144fe 100644 --- a/command/agentproxyshared/sink/sink.go +++ b/command/agent/sink/sink.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package sink import ( diff --git a/command/agent/template/template.go b/command/agent/template/template.go index be3ccc4eab64df..8d44e0db16d4c0 100644 --- a/command/agent/template/template.go +++ b/command/agent/template/template.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Package template is responsible for rendering user supplied templates to // disk. The Server type accepts configuration to communicate to a Vault server // and a Vault token for authentication. 
Internally, the Server creates a Consul @@ -13,16 +10,15 @@ import ( "errors" "fmt" "io" + "strings" "go.uber.org/atomic" ctconfig "github.com/hashicorp/consul-template/config" + ctlogging "github.com/hashicorp/consul-template/logging" "github.com/hashicorp/consul-template/manager" "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/command/agent/internal/ctmanager" - "github.com/hashicorp/vault/helper/useragent" "github.com/hashicorp/vault/sdk/helper/pointerutil" ) @@ -111,14 +107,8 @@ func (ts *Server) Run(ctx context.Context, incoming chan string, templates []*ct // configuration var runnerConfig *ctconfig.Config var runnerConfigErr error - managerConfig := ctmanager.ManagerConfig{ - AgentConfig: ts.config.AgentConfig, - Namespace: ts.config.Namespace, - LogLevel: ts.config.LogLevel, - LogWriter: ts.config.LogWriter, - } - runnerConfig, runnerConfigErr = ctmanager.NewConfig(managerConfig, templates) - if runnerConfigErr != nil { + + if runnerConfig, runnerConfigErr = newRunnerConfig(ts.config, templates); runnerConfigErr != nil { return fmt.Errorf("template server failed to runner generate config: %w", runnerConfigErr) } @@ -167,8 +157,7 @@ func (ts *Server) Run(ctx context.Context, incoming chan string, templates []*ct *latestToken = token ctv := ctconfig.Config{ Vault: &ctconfig.VaultConfig{ - Token: latestToken, - ClientUserAgent: pointerutil.StringPtr(useragent.AgentTemplatingString()), + Token: latestToken, }, } @@ -235,3 +224,131 @@ func (ts *Server) Stop() { close(ts.DoneCh) } } + +// newRunnerConfig returns a consul-template runner configuration, setting the +// Vault and Consul configurations based on the clients configs. +func newRunnerConfig(sc *ServerConfig, templates ctconfig.TemplateConfigs) (*ctconfig.Config, error) { + conf := ctconfig.DefaultConfig() + conf.Templates = templates.Copy() + + // Setup the Vault config + // Always set these to ensure nothing is picked up from the environment + conf.Vault.RenewToken = pointerutil.BoolPtr(false) + conf.Vault.Token = pointerutil.StringPtr("") + conf.Vault.Address = &sc.AgentConfig.Vault.Address + + if sc.Namespace != "" { + conf.Vault.Namespace = &sc.Namespace + } + + if sc.AgentConfig.TemplateConfig != nil && sc.AgentConfig.TemplateConfig.StaticSecretRenderInt != 0 { + conf.Vault.DefaultLeaseDuration = &sc.AgentConfig.TemplateConfig.StaticSecretRenderInt + } + + if sc.AgentConfig.DisableIdleConnsTemplating { + idleConns := -1 + conf.Vault.Transport.MaxIdleConns = &idleConns + } + + if sc.AgentConfig.DisableKeepAlivesTemplating { + conf.Vault.Transport.DisableKeepAlives = pointerutil.BoolPtr(true) + } + + conf.Vault.SSL = &ctconfig.SSLConfig{ + Enabled: pointerutil.BoolPtr(false), + Verify: pointerutil.BoolPtr(false), + Cert: pointerutil.StringPtr(""), + Key: pointerutil.StringPtr(""), + CaCert: pointerutil.StringPtr(""), + CaPath: pointerutil.StringPtr(""), + ServerName: pointerutil.StringPtr(""), + } + + // If Vault.Retry isn't specified, use the default of 12 retries. + // This retry value will be respected regardless of if we use the cache. + attempts := ctconfig.DefaultRetryAttempts + if sc.AgentConfig.Vault != nil && sc.AgentConfig.Vault.Retry != nil { + attempts = sc.AgentConfig.Vault.Retry.NumRetries + } + + // Use the cache if available or fallback to the Vault server values. 
+ if sc.AgentConfig.Cache != nil { + if sc.AgentConfig.Cache.InProcDialer == nil { + return nil, fmt.Errorf("missing in-process dialer configuration") + } + if conf.Vault.Transport == nil { + conf.Vault.Transport = &ctconfig.TransportConfig{} + } + conf.Vault.Transport.CustomDialer = sc.AgentConfig.Cache.InProcDialer + // The in-process dialer ignores the address passed in, but we're still + // setting it here to override the setting at the top of this function, + // and to prevent the vault/http client from defaulting to https. + conf.Vault.Address = pointerutil.StringPtr("http://127.0.0.1:8200") + } else if strings.HasPrefix(sc.AgentConfig.Vault.Address, "https") || sc.AgentConfig.Vault.CACert != "" { + skipVerify := sc.AgentConfig.Vault.TLSSkipVerify + verify := !skipVerify + conf.Vault.SSL = &ctconfig.SSLConfig{ + Enabled: pointerutil.BoolPtr(true), + Verify: &verify, + Cert: &sc.AgentConfig.Vault.ClientCert, + Key: &sc.AgentConfig.Vault.ClientKey, + CaCert: &sc.AgentConfig.Vault.CACert, + CaPath: &sc.AgentConfig.Vault.CAPath, + ServerName: &sc.AgentConfig.Vault.TLSServerName, + } + } + enabled := attempts > 0 + conf.Vault.Retry = &ctconfig.RetryConfig{ + Attempts: &attempts, + Enabled: &enabled, + } + + // Sync Consul Template's retry with user set auto-auth initial backoff value. + // This is helpful if Auto Auth cannot get a new token and CT is trying to fetch + // secrets. + if sc.AgentConfig.AutoAuth != nil && sc.AgentConfig.AutoAuth.Method != nil { + if sc.AgentConfig.AutoAuth.Method.MinBackoff > 0 { + conf.Vault.Retry.Backoff = &sc.AgentConfig.AutoAuth.Method.MinBackoff + } + + if sc.AgentConfig.AutoAuth.Method.MaxBackoff > 0 { + conf.Vault.Retry.MaxBackoff = &sc.AgentConfig.AutoAuth.Method.MaxBackoff + } + } + + conf.Finalize() + + // setup log level from TemplateServer config + conf.LogLevel = logLevelToStringPtr(sc.LogLevel) + + if err := ctlogging.Setup(&ctlogging.Config{ + Level: *conf.LogLevel, + Writer: sc.LogWriter, + }); err != nil { + return nil, err + } + return conf, nil +} + +// logLevelToString converts a go-hclog level to a matching, uppercase string +// value. It's used to convert Vault Agent's hclog level to a string version +// suitable for use in Consul Template's runner configuration input. +func logLevelToStringPtr(level hclog.Level) *string { + // consul template's default level is WARN, but Vault Agent's default is INFO, + // so we use that for the Runner's default. + var levelStr string + + switch level { + case hclog.Trace: + levelStr = "TRACE" + case hclog.Debug: + levelStr = "DEBUG" + case hclog.Warn: + levelStr = "WARN" + case hclog.Error: + levelStr = "ERR" + default: + levelStr = "INFO" + } + return pointerutil.StringPtr(levelStr) +} diff --git a/command/agent/template/template_test.go b/command/agent/template/template_test.go index 61822bc5bd3b0f..25999945f99d35 100644 --- a/command/agent/template/template_test.go +++ b/command/agent/template/template_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package template import ( @@ -17,8 +14,6 @@ import ( ctconfig "github.com/hashicorp/consul-template/config" "github.com/hashicorp/go-hclog" "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/command/agent/internal/ctmanager" - "github.com/hashicorp/vault/command/agentproxyshared" "github.com/hashicorp/vault/internalshared/configutil" "github.com/hashicorp/vault/internalshared/listenerutil" "github.com/hashicorp/vault/sdk/helper/logging" @@ -28,14 +23,6 @@ import ( "google.golang.org/grpc/test/bufconn" ) -func newRunnerConfig(s *ServerConfig, configs ctconfig.TemplateConfigs) (*ctconfig.Config, error) { - managerCfg := ctmanager.ManagerConfig{ - AgentConfig: s.AgentConfig, - } - cfg, err := ctmanager.NewConfig(managerCfg, configs) - return cfg, err -} - // TestNewServer is a simple test to make sure NewServer returns a Server and // channel func TestNewServer(t *testing.T) { @@ -88,7 +75,7 @@ func newAgentConfig(listeners []*configutil.Listener, enableCache, enablePersise } if enablePersisentCache { - agentConfig.Cache.Persist = &agentproxyshared.PersistConfig{Type: "kubernetes"} + agentConfig.Cache.Persist = &config.Persist{Type: "kubernetes"} } return agentConfig diff --git a/command/agent/testing.go b/command/agent/testing.go index 04a2f0608a6c68..d4de988a98575f 100644 --- a/command/agent/testing.go +++ b/command/agent/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -9,14 +6,14 @@ import ( "crypto/x509" "encoding/json" "encoding/pem" + "io/ioutil" "os" "testing" "time" - "github.com/go-jose/go-jose/v3" - "github.com/go-jose/go-jose/v3/jwt" - "github.com/hashicorp/vault/sdk/logical" + jose "gopkg.in/square/go-jose.v2" + "gopkg.in/square/go-jose.v2/jwt" ) const envVarRunAccTests = "VAULT_ACC" @@ -64,7 +61,7 @@ func GetTestJWT(t *testing.T) (string, *ecdsa.PrivateKey) { } func readToken(fileName string) (*logical.HTTPWrapInfo, error) { - b, err := os.ReadFile(fileName) + b, err := ioutil.ReadFile(fileName) if err != nil { return nil, err } diff --git a/command/agent/token_file_end_to_end_test.go b/command/agent/token_file_end_to_end_test.go index dc7115cb18f4c3..d9819ada0260cc 100644 --- a/command/agent/token_file_end_to_end_test.go +++ b/command/agent/token_file_end_to_end_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package agent import ( @@ -11,10 +8,10 @@ import ( "time" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - token_file "github.com/hashicorp/vault/command/agentproxyshared/auth/token-file" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" + "github.com/hashicorp/vault/command/agent/auth" + token_file "github.com/hashicorp/vault/command/agent/auth/token-file" + "github.com/hashicorp/vault/command/agent/sink" + "github.com/hashicorp/vault/command/agent/sink/file" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/vault" diff --git a/command/agentproxyshared/winsvc/service.go b/command/agent/winsvc/service.go similarity index 76% rename from command/agentproxyshared/winsvc/service.go rename to command/agent/winsvc/service.go index edd234e0c57d23..c8d21f5c7d0af3 100644 --- a/command/agentproxyshared/winsvc/service.go +++ b/command/agent/winsvc/service.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package winsvc var chanGraceExit = make(chan int) diff --git a/command/agentproxyshared/winsvc/service_windows.go b/command/agent/winsvc/service_windows.go similarity index 93% rename from command/agentproxyshared/winsvc/service_windows.go rename to command/agent/winsvc/service_windows.go index bb16bf97aeea2f..69177e01fd66d2 100644 --- a/command/agentproxyshared/winsvc/service_windows.go +++ b/command/agent/winsvc/service_windows.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build windows package winsvc diff --git a/command/agent_generate_config.go b/command/agent_generate_config.go deleted file mode 100644 index 5c42d0e59cf8c2..00000000000000 --- a/command/agent_generate_config.go +++ /dev/null @@ -1,441 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package command - -import ( - "context" - "fmt" - "io" - "os" - paths "path" - "sort" - "strings" - "unicode" - - "github.com/hashicorp/hcl/v2/gohcl" - "github.com/hashicorp/hcl/v2/hclwrite" - "github.com/hashicorp/vault/api" - "github.com/mitchellh/cli" - "github.com/mitchellh/go-homedir" - "github.com/posener/complete" -) - -var ( - _ cli.Command = (*AgentGenerateConfigCommand)(nil) - _ cli.CommandAutocomplete = (*AgentGenerateConfigCommand)(nil) -) - -type AgentGenerateConfigCommand struct { - *BaseCommand - - flagType string - flagPaths []string - flagExec string -} - -func (c *AgentGenerateConfigCommand) Synopsis() string { - return "Generate a Vault Agent configuration file." -} - -func (c *AgentGenerateConfigCommand) Help() string { - helpText := ` -Usage: vault agent generate-config [options] [path/to/config.hcl] - - Generates a simple Vault Agent configuration file from the given parameters. - - Currently, the only supported configuration type is 'env-template', which - helps you generate a configuration file with environment variable templates - for running Vault Agent in process supervisor mode. - - For every specified secret -path, the command will attempt to generate one or - multiple 'env_template' entries based on the JSON key(s) stored in the - specified secret. If the secret -path ends with '/*', the command will - attempt to recurse through the secrets tree rooted at the given path, - generating 'env_template' entries for each encountered secret. 
Currently, - only kv-v1 and kv-v2 paths are supported. - - The command specified in the '-exec' option will be used to generate an - 'exec' entry, which will tell Vault Agent which child process to run. - - In addition to env_template entries, the command generates an 'auto_auth' - section with 'token_file' authentication method. While this method is very - convenient for local testing, it should NOT be used in production. Please - see https://developer.hashicorp.com/vault/docs/agentandproxy/autoauth for - a list of production-ready auto_auth methods that you can use instead. - - By default, the file will be generated in the local directory as 'agent.hcl' - unless a path is specified as an argument. - - Generate a simple environment variable template configuration: - - $ vault agent generate-config -type="env-template" \ - -exec="./my-app arg1 arg2" \ - -path="secret/foo" - - Generate an environment variable template configuration for multiple secrets: - - $ vault agent generate-config -type="env-template" \ - -exec="./my-app arg1 arg2" \ - -path="secret/foo" \ - -path="secret/bar" \ - -path="secret/my-app/*" - -` + c.Flags().Help() - - return strings.TrimSpace(helpText) -} - -func (c *AgentGenerateConfigCommand) Flags() *FlagSets { - set := NewFlagSets(c.UI) - - // Common Options - f := set.NewFlagSet("Command Options") - - f.StringVar(&StringVar{ - Name: "type", - Target: &c.flagType, - Usage: "Type of configuration file to generate; currently, only 'env-template' is supported.", - Completion: complete.PredictSet( - "env-template", - ), - }) - - f.StringSliceVar(&StringSliceVar{ - Name: "path", - Target: &c.flagPaths, - Usage: "Path to a kv-v1 or kv-v2 secret (e.g. secret/data/foo, kv-v2/prefix/*); multiple secrets and tail '*' wildcards are allowed.", - Completion: c.PredictVaultFolders(), - }) - - f.StringVar(&StringVar{ - Name: "exec", - Target: &c.flagExec, - Default: "env", - Usage: "The command to execute in agent process supervisor mode.", - }) - - return set -} - -func (c *AgentGenerateConfigCommand) AutocompleteArgs() complete.Predictor { - return complete.PredictNothing -} - -func (c *AgentGenerateConfigCommand) AutocompleteFlags() complete.Flags { - return c.Flags().Completions() -} - -func (c *AgentGenerateConfigCommand) Run(args []string) int { - flags := c.Flags() - - if err := flags.Parse(args); err != nil { - c.UI.Error(err.Error()) - return 1 - } - - args = flags.Args() - - if len(args) > 1 { - c.UI.Error(fmt.Sprintf("Too many arguments (expected at most 1, got %d)", len(args))) - return 1 - } - - if c.flagType == "" { - c.UI.Error(`Please specify a -type flag; currently only -type="env-template" is supported.`) - return 1 - } - - if c.flagType != "env-template" { - c.UI.Error(fmt.Sprintf(`%q is not a supported configuration type; currently only -type="env-template" is supported.`, c.flagType)) - return 1 - } - - client, err := c.Client() - if err != nil { - c.UI.Error(err.Error()) - return 2 - } - - config, err := generateConfiguration(context.Background(), client, c.flagExec, c.flagPaths) - if err != nil { - c.UI.Error(fmt.Sprintf("Error: %v", err)) - return 2 - } - - var configPath string - if len(args) == 1 { - configPath = args[0] - } else { - configPath = "agent.hcl" - } - - f, err := os.Create(configPath) - if err != nil { - c.UI.Error(fmt.Sprintf("Could not create configuration file %q: %v", configPath, err)) - return 3 - } - defer func() { - if err := f.Close(); err != nil { - c.UI.Error(fmt.Sprintf("Could not close configuration file %q: %v", configPath, err)) 
- } - }() - - if _, err := config.WriteTo(f); err != nil { - c.UI.Error(fmt.Sprintf("Could not write to configuration file %q: %v", configPath, err)) - return 3 - } - - c.UI.Info(fmt.Sprintf("Successfully generated %q configuration file!", configPath)) - - c.UI.Warn("Warning: the generated file uses 'token_file' authentication method, which is not suitable for production environments.") - - return 0 -} - -func generateConfiguration(ctx context.Context, client *api.Client, flagExec string, flagPaths []string) (io.WriterTo, error) { - var execCommand []string - if flagExec != "" { - execCommand = strings.Split(flagExec, " ") - } else { - execCommand = []string{"env"} - } - - tokenPath, err := homedir.Expand("~/.vault-token") - if err != nil { - return nil, fmt.Errorf("could not expand home directory: %w", err) - } - - templates, err := constructTemplates(ctx, client, flagPaths) - if err != nil { - return nil, fmt.Errorf("could not generate templates: %w", err) - } - - config := generatedConfig{ - AutoAuth: generatedConfigAutoAuth{ - Method: generatedConfigAutoAuthMethod{ - Type: "token_file", - Config: generatedConfigAutoAuthMethodConfig{ - TokenFilePath: tokenPath, - }, - }, - }, - TemplateConfig: generatedConfigTemplateConfig{ - StaticSecretRenderInterval: "5m", - ExitOnRetryFailure: true, - }, - Vault: generatedConfigVault{ - Address: client.Address(), - }, - Exec: generatedConfigExec{ - Command: execCommand, - RestartOnSecretChanges: "always", - RestartStopSignal: "SIGTERM", - }, - EnvTemplates: templates, - } - - contents := hclwrite.NewEmptyFile() - - gohcl.EncodeIntoBody(&config, contents.Body()) - - return contents, nil -} - -func constructTemplates(ctx context.Context, client *api.Client, paths []string) ([]generatedConfigEnvTemplate, error) { - var templates []generatedConfigEnvTemplate - - for _, path := range paths { - path = sanitizePath(path) - - mountPath, v2, err := isKVv2(path, client) - if err != nil { - return nil, fmt.Errorf("could not validate secret path %q: %w", path, err) - } - - switch { - case strings.HasSuffix(path, "/*"): - // this path contains a tail wildcard, attempt to walk the tree - t, err := constructTemplatesFromTree(ctx, client, path[:len(path)-2], mountPath, v2) - if err != nil { - return nil, fmt.Errorf("could not traverse sercet at %q: %w", path, err) - } - templates = append(templates, t...) - - case strings.Contains(path, "*"): - // don't allow any other wildcards - return nil, fmt.Errorf("the path %q cannot contain '*' wildcard characters except as the last element of the path", path) - - default: - // regular secret path - t, err := constructTemplatesFromSecret(ctx, client, path, mountPath, v2) - if err != nil { - return nil, fmt.Errorf("could not read secret at %q: %v", path, err) - } - templates = append(templates, t...) 
- } - } - - return templates, nil -} - -func constructTemplatesFromTree(ctx context.Context, client *api.Client, path, mountPath string, v2 bool) ([]generatedConfigEnvTemplate, error) { - var templates []generatedConfigEnvTemplate - - if v2 { - metadataPath := strings.Replace( - path, - paths.Join(mountPath, "data"), - paths.Join(mountPath, "metadata"), - 1, - ) - if path != metadataPath { - path = metadataPath - } else { - path = addPrefixToKVPath(path, mountPath, "metadata", true) - } - } - - err := walkSecretsTree(ctx, client, path, func(child string, directory bool) error { - if directory { - return nil - } - - dataPath := strings.Replace( - child, - paths.Join(mountPath, "metadata"), - paths.Join(mountPath, "data"), - 1, - ) - - t, err := constructTemplatesFromSecret(ctx, client, dataPath, mountPath, v2) - if err != nil { - return err - } - templates = append(templates, t...) - - return nil - }) - if err != nil { - return nil, err - } - - return templates, nil -} - -func constructTemplatesFromSecret(ctx context.Context, client *api.Client, path, mountPath string, v2 bool) ([]generatedConfigEnvTemplate, error) { - var templates []generatedConfigEnvTemplate - - if v2 { - path = addPrefixToKVPath(path, mountPath, "data", true) - } - - resp, err := client.Logical().ReadWithContext(ctx, path) - if err != nil { - return nil, fmt.Errorf("error querying: %w", err) - } - if resp == nil { - return nil, fmt.Errorf("secret not found") - } - - var data map[string]interface{} - if v2 { - internal, ok := resp.Data["data"] - if !ok { - return nil, fmt.Errorf("secret.Data not found") - } - data = internal.(map[string]interface{}) - } else { - data = resp.Data - } - - fields := make([]string, 0, len(data)) - - for field := range data { - fields = append(fields, field) - } - - // sort for a deterministic output - sort.Strings(fields) - - var dataContents string - if v2 { - dataContents = ".Data.data" - } else { - dataContents = ".Data" - } - - for _, field := range fields { - templates = append(templates, generatedConfigEnvTemplate{ - Name: constructDefaultEnvironmentKey(path, field), - Contents: fmt.Sprintf(`{{ with secret "%s" }}{{ %s.%s }}{{ end }}`, path, dataContents, field), - ErrorOnMissingKey: true, - }) - } - - return templates, nil -} - -func constructDefaultEnvironmentKey(path string, field string) string { - pathParts := strings.Split(path, "/") - pathPartsLast := pathParts[len(pathParts)-1] - - notLetterOrNumber := func(r rune) bool { - return !unicode.IsLetter(r) && !unicode.IsNumber(r) - } - - p1 := strings.FieldsFunc(pathPartsLast, notLetterOrNumber) - p2 := strings.FieldsFunc(field, notLetterOrNumber) - - keyParts := append(p1, p2...) - - return strings.ToUpper(strings.Join(keyParts, "_")) -} - -// Below, we are redefining a subset of the configuration-related structures -// defined under command/agent/config. Using these structures we can tailor the -// output of the generated config, while using the original structures would -// have produced an HCL document with many empty fields. The structures below -// should not be used for anything other than generation. 
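For readers skimming the removed generator above, here is a minimal, self-contained sketch of the template-string and environment-key construction it performed. Only the format strings and the splitting/upper-casing rule mirror the deleted constructTemplatesFromSecret and constructDefaultEnvironmentKey; the helper name and sample values are illustrative, not part of the patch.

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// envKey mirrors constructDefaultEnvironmentKey: take the last path segment and
// the field name, split both on non-alphanumerics, join with "_", upper-case.
func envKey(path, field string) string {
	last := path[strings.LastIndex(path, "/")+1:]
	notAlnum := func(r rune) bool { return !unicode.IsLetter(r) && !unicode.IsNumber(r) }
	parts := append(strings.FieldsFunc(last, notAlnum), strings.FieldsFunc(field, notAlnum)...)
	return strings.ToUpper(strings.Join(parts, "_"))
}

func main() {
	// kv-v2 secrets are read under <mount>/data/<path> and rendered via .Data.data;
	// kv-v1 secrets use the path as-is and .Data.
	path, field, dataContents := "kv-v2/data/foo", "password", ".Data.data"
	contents := fmt.Sprintf(`{{ with secret "%s" }}{{ %s.%s }}{{ end }}`, path, dataContents, field)
	fmt.Println(envKey(path, field)) // FOO_PASSWORD
	fmt.Println(contents)            // {{ with secret "kv-v2/data/foo" }}{{ .Data.data.password }}{{ end }}
}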
- -type generatedConfig struct { - AutoAuth generatedConfigAutoAuth `hcl:"auto_auth,block"` - TemplateConfig generatedConfigTemplateConfig `hcl:"template_config,block"` - Vault generatedConfigVault `hcl:"vault,block"` - EnvTemplates []generatedConfigEnvTemplate `hcl:"env_template,block"` - Exec generatedConfigExec `hcl:"exec,block"` -} - -type generatedConfigTemplateConfig struct { - StaticSecretRenderInterval string `hcl:"static_secret_render_interval"` - ExitOnRetryFailure bool `hcl:"exit_on_retry_failure"` -} - -type generatedConfigExec struct { - Command []string `hcl:"command"` - RestartOnSecretChanges string `hcl:"restart_on_secret_changes"` - RestartStopSignal string `hcl:"restart_stop_signal"` -} - -type generatedConfigEnvTemplate struct { - Name string `hcl:"name,label"` - Contents string `hcl:"contents,attr"` - ErrorOnMissingKey bool `hcl:"error_on_missing_key"` -} - -type generatedConfigVault struct { - Address string `hcl:"address"` -} - -type generatedConfigAutoAuth struct { - Method generatedConfigAutoAuthMethod `hcl:"method,block"` -} - -type generatedConfigAutoAuthMethod struct { - Type string `hcl:"type"` - Config generatedConfigAutoAuthMethodConfig `hcl:"config,block"` -} - -type generatedConfigAutoAuthMethodConfig struct { - TokenFilePath string `hcl:"token_file_path"` -} diff --git a/command/agent_generate_config_test.go b/command/agent_generate_config_test.go deleted file mode 100644 index f225a7c9e8cb43..00000000000000 --- a/command/agent_generate_config_test.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package command - -import ( - "bytes" - "context" - "reflect" - "regexp" - "testing" - "time" -) - -// TestConstructTemplates tests the construcTemplates helper function -func TestConstructTemplates(t *testing.T) { - ctx, cancelContextFunc := context.WithTimeout(context.Background(), 5*time.Second) - defer cancelContextFunc() - - client, closer := testVaultServerWithSecrets(ctx, t) - defer closer() - - cases := map[string]struct { - paths []string - expected []generatedConfigEnvTemplate - expectedError bool - }{ - "kv-v1-simple": { - paths: []string{"kv-v1/foo"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v1/foo" }}{{ .Data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v1/foo" }}{{ .Data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - }, - expectedError: false, - }, - - "kv-v2-simple": { - paths: []string{"kv-v2/foo"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v2/data/foo" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/foo" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - }, - expectedError: false, - }, - - "kv-v2-data-in-path": { - paths: []string{"kv-v2/data/foo"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v2/data/foo" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/foo" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - }, - expectedError: false, - }, - - "kv-v1-nested": { - paths: []string{"kv-v1/app-1/*"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v1/app-1/bar" }}{{ .Data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_PASSWORD"}, - {Contents: `{{ with secret 
"kv-v1/app-1/bar" }}{{ .Data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_USER"}, - {Contents: `{{ with secret "kv-v1/app-1/foo" }}{{ .Data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v1/app-1/foo" }}{{ .Data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - {Contents: `{{ with secret "kv-v1/app-1/nested/baz" }}{{ .Data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAZ_PASSWORD"}, - {Contents: `{{ with secret "kv-v1/app-1/nested/baz" }}{{ .Data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAZ_USER"}, - }, - expectedError: false, - }, - - "kv-v2-nested": { - paths: []string{"kv-v2/app-1/*"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v2/data/app-1/bar" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/app-1/bar" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_USER"}, - {Contents: `{{ with secret "kv-v2/data/app-1/foo" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/app-1/foo" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - {Contents: `{{ with secret "kv-v2/data/app-1/nested/baz" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAZ_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/app-1/nested/baz" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAZ_USER"}, - }, - expectedError: false, - }, - - "kv-v1-multi-path": { - paths: []string{"kv-v1/foo", "kv-v1/app-1/bar"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v1/foo" }}{{ .Data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v1/foo" }}{{ .Data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - {Contents: `{{ with secret "kv-v1/app-1/bar" }}{{ .Data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_PASSWORD"}, - {Contents: `{{ with secret "kv-v1/app-1/bar" }}{{ .Data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_USER"}, - }, - expectedError: false, - }, - - "kv-v2-multi-path": { - paths: []string{"kv-v2/foo", "kv-v2/app-1/bar"}, - expected: []generatedConfigEnvTemplate{ - {Contents: `{{ with secret "kv-v2/data/foo" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/foo" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "FOO_USER"}, - {Contents: `{{ with secret "kv-v2/data/app-1/bar" }}{{ .Data.data.password }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_PASSWORD"}, - {Contents: `{{ with secret "kv-v2/data/app-1/bar" }}{{ .Data.data.user }}{{ end }}`, ErrorOnMissingKey: true, Name: "BAR_USER"}, - }, - expectedError: false, - }, - - "kv-v1-path-not-found": { - paths: []string{"kv-v1/does/not/exist"}, - expected: nil, - expectedError: true, - }, - - "kv-v2-path-not-found": { - paths: []string{"kv-v2/does/not/exist"}, - expected: nil, - expectedError: true, - }, - - "kv-v1-early-wildcard": { - paths: []string{"kv-v1/*/foo"}, - expected: nil, - expectedError: true, - }, - - "kv-v2-early-wildcard": { - paths: []string{"kv-v2/*/foo"}, - expected: nil, - expectedError: true, - }, - } - - for name, tc := range cases { - name, tc := name, tc - - t.Run(name, func(t *testing.T) { - templates, err := constructTemplates(ctx, client, tc.paths) - - if 
tc.expectedError { - if err == nil { - t.Fatal("an error was expected but the test succeeded") - } - } else { - if err != nil { - t.Fatal(err) - } - - if !reflect.DeepEqual(tc.expected, templates) { - t.Fatalf("unexpected output; want: %v, got: %v", tc.expected, templates) - } - } - }) - } -} - -// TestGenerateConfiguration tests the generateConfiguration helper function -func TestGenerateConfiguration(t *testing.T) { - ctx, cancelContextFunc := context.WithTimeout(context.Background(), 5*time.Second) - defer cancelContextFunc() - - client, closer := testVaultServerWithSecrets(ctx, t) - defer closer() - - cases := map[string]struct { - flagExec string - flagPaths []string - expected *regexp.Regexp - expectedError bool - }{ - "kv-v1-simple": { - flagExec: "./my-app arg1 arg2", - flagPaths: []string{"kv-v1/foo"}, - expected: regexp.MustCompile(` -auto_auth \{ - - method \{ - type = "token_file" - - config \{ - token_file_path = ".*/.vault-token" - } - } -} - -template_config \{ - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault \{ - address = "https://127.0.0.1:[0-9]{5}" -} - -env_template "FOO_PASSWORD" \{ - contents = "\{\{ with secret \\"kv-v1/foo\\" }}\{\{ .Data.password }}\{\{ end }}" - error_on_missing_key = true -} -env_template "FOO_USER" \{ - contents = "\{\{ with secret \\"kv-v1/foo\\" }}\{\{ .Data.user }}\{\{ end }}" - error_on_missing_key = true -} - -exec \{ - command = \["./my-app", "arg1", "arg2"\] - restart_on_secret_changes = "always" - restart_stop_signal = "SIGTERM" -} -`), - expectedError: false, - }, - - "kv-v2-default-exec": { - flagExec: "", - flagPaths: []string{"kv-v2/foo"}, - expected: regexp.MustCompile(` -auto_auth \{ - - method \{ - type = "token_file" - - config \{ - token_file_path = ".*/.vault-token" - } - } -} - -template_config \{ - static_secret_render_interval = "5m" - exit_on_retry_failure = true -} - -vault \{ - address = "https://127.0.0.1:[0-9]{5}" -} - -env_template "FOO_PASSWORD" \{ - contents = "\{\{ with secret \\"kv-v2/data/foo\\" }}\{\{ .Data.data.password }}\{\{ end }}" - error_on_missing_key = true -} -env_template "FOO_USER" \{ - contents = "\{\{ with secret \\"kv-v2/data/foo\\" }}\{\{ .Data.data.user }}\{\{ end }}" - error_on_missing_key = true -} - -exec \{ - command = \["env"\] - restart_on_secret_changes = "always" - restart_stop_signal = "SIGTERM" -} -`), - expectedError: false, - }, - } - - for name, tc := range cases { - name, tc := name, tc - - t.Run(name, func(t *testing.T) { - var config bytes.Buffer - - c, err := generateConfiguration(ctx, client, tc.flagExec, tc.flagPaths) - c.WriteTo(&config) - - if tc.expectedError { - if err == nil { - t.Fatal("an error was expected but the test succeeded") - } - } else { - if err != nil { - t.Fatal(err) - } - - if !tc.expected.MatchString(config.String()) { - t.Fatalf("unexpected output; want: %v, got: %v", tc.expected.String(), config.String()) - } - } - }) - } -} diff --git a/command/agent_test.go b/command/agent_test.go index 406bdf6ac0ab34..0dbfaa0b23984b 100644 --- a/command/agent_test.go +++ b/command/agent_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -26,8 +23,6 @@ import ( credAppRole "github.com/hashicorp/vault/builtin/credential/approle" "github.com/hashicorp/vault/command/agent" agentConfig "github.com/hashicorp/vault/command/agent/config" - "github.com/hashicorp/vault/helper/testhelpers/minimal" - "github.com/hashicorp/vault/helper/useragent" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/helper/logging" @@ -516,258 +511,6 @@ listener "tcp" { } } -// TestAgent_Template_UserAgent Validates that the User-Agent sent to Vault -// as part of Templating requests is correct. Uses the custom handler -// userAgentHandler struct defined in this test package, so that Vault validates the -// User-Agent on requests sent by Agent. -func TestAgent_Template_UserAgent(t *testing.T) { - //---------------------------------------------------- - // Start the server and agent - //---------------------------------------------------- - logger := logging.NewVaultLogger(hclog.Trace) - var h userAgentHandler - cluster := vault.NewTestCluster(t, - &vault.CoreConfig{ - Logger: logger, - CredentialBackends: map[string]logical.Factory{ - "approle": credAppRole.Factory, - }, - LogicalBackends: map[string]logical.Factory{ - "kv": logicalKv.Factory, - }, - }, - &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.AgentTemplatingString() - h.pathToCheck = "/v1/secret/data" - h.requestMethodToCheck = "GET" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - vault.TestWaitActive(t, cluster.Cores[0].Core) - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that agent picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Setenv(api.EnvVaultAddress, serverClient.Address()) - - // Enable the approle auth method - req := serverClient.NewRequest("POST", "/v1/sys/auth/approle") - req.BodyBytes = []byte(`{ - "type": "approle" - }`) - request(t, serverClient, req, 204) - - // give test-role permissions to read the kv secret - req = serverClient.NewRequest("PUT", "/v1/sys/policy/myapp-read") - req.BodyBytes = []byte(`{ - "policy": "path \"secret/*\" { capabilities = [\"read\", \"list\"] }" - }`) - request(t, serverClient, req, 204) - - // Create a named role - req = serverClient.NewRequest("PUT", "/v1/auth/approle/role/test-role") - req.BodyBytes = []byte(`{ - "token_ttl": "5m", - "token_policies":"default,myapp-read", - "policies":"default,myapp-read" - }`) - request(t, serverClient, req, 204) - - // Fetch the RoleID of the named role - req = serverClient.NewRequest("GET", "/v1/auth/approle/role/test-role/role-id") - body := request(t, serverClient, req, 200) - data := body["data"].(map[string]interface{}) - roleID := data["role_id"].(string) - - // Get a SecretID issued against the named role - req = serverClient.NewRequest("PUT", "/v1/auth/approle/role/test-role/secret-id") - body = request(t, serverClient, req, 200) - data = body["data"].(map[string]interface{}) - secretID := data["secret_id"].(string) - - // Write the RoleID and SecretID to temp files - roleIDPath := makeTempFile(t, "role_id.txt", roleID+"\n") - secretIDPath := makeTempFile(t, "secret_id.txt", secretID+"\n") - defer os.Remove(roleIDPath) - defer os.Remove(secretIDPath) - - // setup the kv secrets - 
req = serverClient.NewRequest("POST", "/v1/sys/mounts/secret/tune") - req.BodyBytes = []byte(`{ - "options": {"version": "2"} - }`) - request(t, serverClient, req, 200) - - // populate a secret - req = serverClient.NewRequest("POST", "/v1/secret/data/myapp") - req.BodyBytes = []byte(`{ - "data": { - "username": "bar", - "password": "zap" - } - }`) - request(t, serverClient, req, 200) - - // populate another secret - req = serverClient.NewRequest("POST", "/v1/secret/data/otherapp") - req.BodyBytes = []byte(`{ - "data": { - "username": "barstuff", - "password": "zap", - "cert": "something" - } - }`) - request(t, serverClient, req, 200) - - // make a temp directory to hold renders. Each test will create a temp dir - // inside this one - tmpDirRoot, err := os.MkdirTemp("", "agent-test-renders") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDirRoot) - // create temp dir for this test run - tmpDir, err := os.MkdirTemp(tmpDirRoot, "TestAgent_Template_UserAgent") - if err != nil { - t.Fatal(err) - } - - // make some template files - var templatePaths []string - fileName := filepath.Join(tmpDir, "render_0.tmpl") - if err := os.WriteFile(fileName, []byte(templateContents(0)), 0o600); err != nil { - t.Fatal(err) - } - templatePaths = append(templatePaths, fileName) - - // build up the template config to be added to the Agent config.hcl file - var templateConfigStrings []string - for i, t := range templatePaths { - index := fmt.Sprintf("render_%d.json", i) - s := fmt.Sprintf(templateConfigString, t, tmpDir, index) - templateConfigStrings = append(templateConfigStrings, s) - } - - // Create a config file - config := ` -vault { - address = "%s" - tls_skip_verify = true -} - -auto_auth { - method "approle" { - mount_path = "auth/approle" - config = { - role_id_file_path = "%s" - secret_id_file_path = "%s" - remove_secret_id_file_after_reading = false - } - } -} - -%s -` - - // flatten the template configs - templateConfig := strings.Join(templateConfigStrings, " ") - - config = fmt.Sprintf(config, serverClient.Address(), roleIDPath, secretIDPath, templateConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Start the agent - ui, cmd := testAgentCommand(t, logger) - cmd.client = serverClient - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - code := cmd.Run([]string{"-config", configPath}) - if code != 0 { - t.Errorf("non-zero return code when running agent: %d", code) - t.Logf("STDOUT from agent:\n%s", ui.OutputWriter.String()) - t.Logf("STDERR from agent:\n%s", ui.ErrorWriter.String()) - } - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - // We need to shut down the Agent command - defer func() { - cmd.ShutdownCh <- struct{}{} - wg.Wait() - }() - - verify := func(suffix string) { - t.Helper() - // We need to poll for a bit to give Agent time to render the - // templates. 
Without this, the test will attempt to read - // the temp dir before Agent has had time to render and will - // likely fail the test - tick := time.Tick(1 * time.Second) - timeout := time.After(10 * time.Second) - var err error - for { - select { - case <-timeout: - t.Fatalf("timed out waiting for templates to render, last error: %v", err) - case <-tick: - } - // Check for files rendered in the directory and break - // early for shutdown if we do have all the files - // rendered - - //---------------------------------------------------- - // Perform the tests - //---------------------------------------------------- - - if numFiles := testListFiles(t, tmpDir, ".json"); numFiles != len(templatePaths) { - err = fmt.Errorf("expected (%d) templates, got (%d)", len(templatePaths), numFiles) - continue - } - - for i := range templatePaths { - fileName := filepath.Join(tmpDir, fmt.Sprintf("render_%d.json", i)) - var c []byte - c, err = os.ReadFile(fileName) - if err != nil { - continue - } - if string(c) != templateRendered(i)+suffix { - err = fmt.Errorf("expected=%q, got=%q", templateRendered(i)+suffix, string(c)) - continue - } - } - return - } - } - - verify("") - - fileName = filepath.Join(tmpDir, "render_0.tmpl") - if err := os.WriteFile(fileName, []byte(templateContents(0)+"{}"), 0o600); err != nil { - t.Fatal(err) - } - - verify("{}") -} - // TestAgent_Template tests rendering templates func TestAgent_Template_Basic(t *testing.T) { //---------------------------------------------------- @@ -1405,27 +1148,6 @@ func (h *handler) ServeHTTP(resp http.ResponseWriter, req *http.Request) { vaulthttp.Handler.Handler(h.props).ServeHTTP(resp, req) } -// userAgentHandler makes it easy to test the User-Agent header received -// by Vault -type userAgentHandler struct { - props *vault.HandlerProperties - failCount int - userAgentToCheckFor string - pathToCheck string - requestMethodToCheck string - t *testing.T -} - -func (h *userAgentHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { - if req.Method == h.requestMethodToCheck && strings.Contains(req.RequestURI, h.pathToCheck) { - userAgent := req.UserAgent() - if !(userAgent == h.userAgentToCheckFor) { - h.t.Fatalf("User-Agent string not as expected. Expected to find %s, got %s", h.userAgentToCheckFor, userAgent) - } - } - vaulthttp.Handler.Handler(h.props).ServeHTTP(w, req) -} - // TestAgent_Template_Retry verifies that the template server retries requests // based on retry configuration. func TestAgent_Template_Retry(t *testing.T) { @@ -1716,345 +1438,6 @@ auto_auth { return config, cleanup } -// TestAgent_AutoAuth_UserAgent tests that the User-Agent sent -// to Vault by Vault Agent is correct when performing Auto-Auth. -// Uses the custom handler userAgentHandler (defined above) so -// that Vault validates the User-Agent on requests sent by Agent. 
-func TestAgent_AutoAuth_UserAgent(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - var h userAgentHandler - cluster := vault.NewTestCluster(t, &vault.CoreConfig{ - Logger: logger, - CredentialBackends: map[string]logical.Factory{ - "approle": credAppRole.Factory, - }, - }, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.AgentAutoAuthString() - h.requestMethodToCheck = "PUT" - h.pathToCheck = "auth/approle/login" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Enable the approle auth method - req := serverClient.NewRequest("POST", "/v1/sys/auth/approle") - req.BodyBytes = []byte(`{ - "type": "approle" - }`) - request(t, serverClient, req, 204) - - // Create a named role - req = serverClient.NewRequest("PUT", "/v1/auth/approle/role/test-role") - req.BodyBytes = []byte(`{ - "secret_id_num_uses": "10", - "secret_id_ttl": "1m", - "token_max_ttl": "1m", - "token_num_uses": "10", - "token_ttl": "1m", - "policies": "default" - }`) - request(t, serverClient, req, 204) - - // Fetch the RoleID of the named role - req = serverClient.NewRequest("GET", "/v1/auth/approle/role/test-role/role-id") - body := request(t, serverClient, req, 200) - data := body["data"].(map[string]interface{}) - roleID := data["role_id"].(string) - - // Get a SecretID issued against the named role - req = serverClient.NewRequest("PUT", "/v1/auth/approle/role/test-role/secret-id") - body = request(t, serverClient, req, 200) - data = body["data"].(map[string]interface{}) - secretID := data["secret_id"].(string) - - // Write the RoleID and SecretID to temp files - roleIDPath := makeTempFile(t, "role_id.txt", roleID+"\n") - secretIDPath := makeTempFile(t, "secret_id.txt", secretID+"\n") - defer os.Remove(roleIDPath) - defer os.Remove(secretIDPath) - - sinkf, err := os.CreateTemp("", "sink.test.") - if err != nil { - t.Fatal(err) - } - sink := sinkf.Name() - sinkf.Close() - os.Remove(sink) - - autoAuthConfig := fmt.Sprintf(` -auto_auth { - method "approle" { - mount_path = "auth/approle" - config = { - role_id_file_path = "%s" - secret_id_file_path = "%s" - } - } - - sink "file" { - config = { - path = "%s" - } - } -}`, roleIDPath, secretIDPath, sink) - - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -api_proxy { - use_auto_auth_token = true -} -%s -%s -`, serverClient.Address(), listenConfig, autoAuthConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Unset the environment variable so that agent picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - // Start the agent - _, cmd := testAgentCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - // Validate that the auto-auth token has been correctly attained - // and works for LookupSelf - conf := api.DefaultConfig() - conf.Address = "http://" + listenAddr - agentClient, err := api.NewClient(conf) 
- if err != nil { - t.Fatalf("err: %s", err) - } - - agentClient.SetToken("") - err = agentClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - // Wait for the token to be sent to syncs and be available to be used - time.Sleep(5 * time.Second) - - req = agentClient.NewRequest("GET", "/v1/auth/token/lookup-self") - body = request(t, agentClient, req, 200) - - close(cmd.ShutdownCh) - wg.Wait() -} - -// TestAgent_APIProxyWithoutCache_UserAgent tests that the User-Agent sent -// to Vault by Vault Agent is correct using the API proxy without -// the cache configured. Uses the custom handler -// userAgentHandler struct defined in this test package, so that Vault validates the -// User-Agent on requests sent by Agent. -func TestAgent_APIProxyWithoutCache_UserAgent(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - userAgentForProxiedClient := "proxied-client" - var h userAgentHandler - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.AgentProxyStringWithProxiedUserAgent(userAgentForProxiedClient) - h.pathToCheck = "/v1/auth/token/lookup-self" - h.requestMethodToCheck = "GET" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that agent picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -%s -`, serverClient.Address(), listenConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Start the agent - _, cmd := testAgentCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - agentClient, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - agentClient.AddHeader("User-Agent", userAgentForProxiedClient) - agentClient.SetToken(serverClient.Token()) - agentClient.SetMaxRetries(0) - err = agentClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - _, err = agentClient.Auth().Token().LookupSelf() - if err != nil { - t.Fatal(err) - } - - close(cmd.ShutdownCh) - wg.Wait() -} - -// TestAgent_APIProxyWithCache_UserAgent tests that the User-Agent sent -// to Vault by Vault Agent is correct using the API proxy with -// the cache configured. Uses the custom handler -// userAgentHandler struct defined in this test package, so that Vault validates the -// User-Agent on requests sent by Agent. 
-func TestAgent_APIProxyWithCache_UserAgent(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - userAgentForProxiedClient := "proxied-client" - var h userAgentHandler - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.AgentProxyStringWithProxiedUserAgent(userAgentForProxiedClient) - h.pathToCheck = "/v1/auth/token/lookup-self" - h.requestMethodToCheck = "GET" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that agent picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - cacheConfig := ` -cache { -}` - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -%s -%s -`, serverClient.Address(), listenConfig, cacheConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Start the agent - _, cmd := testAgentCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - agentClient, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - agentClient.AddHeader("User-Agent", userAgentForProxiedClient) - agentClient.SetToken(serverClient.Token()) - agentClient.SetMaxRetries(0) - err = agentClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - _, err = agentClient.Auth().Token().LookupSelf() - if err != nil { - t.Fatal(err) - } - - close(cmd.ShutdownCh) - wg.Wait() -} - func TestAgent_Cache_DynamicSecret(t *testing.T) { logger := logging.NewVaultLogger(hclog.Trace) cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ @@ -2704,7 +2087,24 @@ func TestAgent_Quit(t *testing.T) { //---------------------------------------------------- // Start the server and agent //---------------------------------------------------- - cluster := minimal.NewTestSoloCluster(t, nil) + logger := logging.NewVaultLogger(hclog.Error) + cluster := vault.NewTestCluster(t, + &vault.CoreConfig{ + Logger: logger, + CredentialBackends: map[string]logical.Factory{ + "approle": credAppRole.Factory, + }, + LogicalBackends: map[string]logical.Factory{ + "kv": logicalKv.Factory, + }, + }, + &vault.TestClusterOptions{ + NumCores: 1, + }) + cluster.Start() + defer cluster.Cleanup() + + vault.TestWaitActive(t, cluster.Cores[0].Core) serverClient := cluster.Cores[0].Client // Unset the environment variable so that agent picks up the right test @@ -2743,7 +2143,7 @@ cache {} defer os.Remove(configPath) // Start the agent - _, cmd := testAgentCommand(t, nil) + _, cmd := testAgentCommand(t, logger) cmd.startedCh = make(chan struct{}) wg := &sync.WaitGroup{} diff --git a/command/agentproxyshared/auth/oci/oci.go b/command/agentproxyshared/auth/oci/oci.go deleted file mode 100644 index ec4d9ebf750b5f..00000000000000 --- a/command/agentproxyshared/auth/oci/oci.go +++ /dev/null @@ -1,265 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package oci - -import ( - "context" - "errors" - "fmt" - "net/http" - "net/url" - "os" - "os/user" - "path" - "sync" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-secure-stdlib/parseutil" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - "github.com/oracle/oci-go-sdk/common" - ociAuth "github.com/oracle/oci-go-sdk/common/auth" -) - -const ( - typeAPIKey = "apikey" - typeInstance = "instance" - - /* - - IAM creds can be inferred from instance metadata or the container - identity service, and those creds expire at varying intervals with - new creds becoming available at likewise varying intervals. Let's - default to polling once a minute so all changes can be picked up - rather quickly. This is configurable, however. - - */ - defaultCredCheckFreqSeconds = 60 * time.Second - - defaultConfigFileName = "config" - defaultConfigDirName = ".oci" - configFilePathEnvVarName = "OCI_CONFIG_FILE" - secondaryConfigDirName = ".oraclebmc" -) - -func NewOCIAuthMethod(conf *auth.AuthConfig, vaultAddress string) (auth.AuthMethod, error) { - if conf == nil { - return nil, errors.New("empty config") - } - if conf.Config == nil { - return nil, errors.New("empty config data") - } - - a := &ociMethod{ - logger: conf.Logger, - vaultAddress: vaultAddress, - mountPath: conf.MountPath, - credsFound: make(chan struct{}), - stopCh: make(chan struct{}), - } - - typeRaw, ok := conf.Config["type"] - if !ok { - return nil, errors.New("missing 'type' value") - } - authType, ok := typeRaw.(string) - if !ok { - return nil, errors.New("could not convert 'type' config value to string") - } - - roleRaw, ok := conf.Config["role"] - if !ok { - return nil, errors.New("missing 'role' value") - } - a.role, ok = roleRaw.(string) - if !ok { - return nil, errors.New("could not convert 'role' config value to string") - } - - // Check for an optional custom frequency at which we should poll for creds. 
- credCheckFreqSec := defaultCredCheckFreqSeconds - if checkFreqRaw, ok := conf.Config["credential_poll_interval"]; ok { - checkFreq, err := parseutil.ParseDurationSecond(checkFreqRaw) - if err != nil { - return nil, fmt.Errorf("could not parse credential_poll_interval: %v", err) - } - credCheckFreqSec = checkFreq - } - - switch { - case a.role == "": - return nil, errors.New("'role' value is empty") - case authType == "": - return nil, errors.New("'type' value is empty") - case authType != typeAPIKey && authType != typeInstance: - return nil, errors.New("'type' value is invalid") - case authType == typeAPIKey: - defaultConfigFile := getDefaultConfigFilePath() - homeFolder := getHomeFolder() - secondaryConfigFile := path.Join(homeFolder, secondaryConfigDirName, defaultConfigFileName) - - environmentProvider := common.ConfigurationProviderEnvironmentVariables("OCI", "") - defaultFileProvider, _ := common.ConfigurationProviderFromFile(defaultConfigFile, "") - secondaryFileProvider, _ := common.ConfigurationProviderFromFile(secondaryConfigFile, "") - - provider, _ := common.ComposingConfigurationProvider([]common.ConfigurationProvider{environmentProvider, defaultFileProvider, secondaryFileProvider}) - a.configurationProvider = provider - case authType == typeInstance: - configurationProvider, err := ociAuth.InstancePrincipalConfigurationProvider() - if err != nil { - return nil, fmt.Errorf("failed to create instance principal configuration provider: %v", err) - } - a.configurationProvider = configurationProvider - } - - // Do an initial population of the creds because we want to err right away if we can't - // even get a first set. - creds, err := a.configurationProvider.KeyID() - if err != nil { - return nil, err - } - a.lastCreds = creds - - go a.pollForCreds(credCheckFreqSec) - - return a, nil -} - -type ociMethod struct { - logger hclog.Logger - vaultAddress string - mountPath string - - configurationProvider common.ConfigurationProvider - role string - - // These are used to share the latest creds safely across goroutines. - credLock sync.Mutex - lastCreds string - - // Notifies the outer environment that it should call Authenticate again. - credsFound chan struct{} - - // Detects that the outer environment is closing. 
- stopCh chan struct{} -} - -func (a *ociMethod) Authenticate(context.Context, *api.Client) (string, http.Header, map[string]interface{}, error) { - a.credLock.Lock() - defer a.credLock.Unlock() - - a.logger.Trace("beginning authentication") - - requestPath := fmt.Sprintf("/v1/%s/login/%s", a.mountPath, a.role) - requestURL := fmt.Sprintf("%s%s", a.vaultAddress, requestPath) - - request, err := http.NewRequest("GET", requestURL, nil) - if err != nil { - return "", nil, nil, fmt.Errorf("error creating authentication request: %w", err) - } - - request.Header.Set("Date", time.Now().UTC().Format(http.TimeFormat)) - - signer := common.DefaultRequestSigner(a.configurationProvider) - - err = signer.Sign(request) - - if err != nil { - return "", nil, nil, fmt.Errorf("error signing authentication request: %w", err) - } - - parsedVaultAddress, err := url.Parse(a.vaultAddress) - if err != nil { - return "", nil, nil, fmt.Errorf("unable to parse vault address: %w", err) - } - - request.Header.Set("Host", parsedVaultAddress.Host) - request.Header.Set("(request-target)", fmt.Sprintf("%s %s", "get", requestPath)) - - data := map[string]interface{}{ - "request_headers": request.Header, - } - - return fmt.Sprintf("%s/login/%s", a.mountPath, a.role), nil, data, nil -} - -func (a *ociMethod) NewCreds() chan struct{} { - return a.credsFound -} - -func (a *ociMethod) CredSuccess() {} - -func (a *ociMethod) Shutdown() { - close(a.credsFound) - close(a.stopCh) -} - -func (a *ociMethod) pollForCreds(frequency time.Duration) { - ticker := time.NewTicker(frequency) - defer ticker.Stop() - for { - select { - case <-a.stopCh: - a.logger.Trace("shutdown triggered, stopping OCI auth handler") - return - case <-ticker.C: - if err := a.checkCreds(); err != nil { - a.logger.Warn("unable to retrieve current creds, retaining last creds", "error", err) - } - } - } -} - -func (a *ociMethod) checkCreds() error { - a.credLock.Lock() - defer a.credLock.Unlock() - - a.logger.Trace("checking for new credentials") - currentCreds, err := a.configurationProvider.KeyID() - if err != nil { - return err - } - // These will always have different pointers regardless of whether their - // values are identical, hence the use of DeepEqual. - if currentCreds == a.lastCreds { - a.logger.Trace("credentials are unchanged") - return nil - } - a.lastCreds = currentCreds - a.logger.Trace("new credentials detected, triggering Authenticate") - a.credsFound <- struct{}{} - return nil -} - -func getHomeFolder() string { - current, e := user.Current() - if e != nil { - // Give up and try to return something sensible - home, err := os.UserHomeDir() - if err != nil { - return "" - } - return home - } - return current.HomeDir -} - -func getDefaultConfigFilePath() string { - homeFolder := getHomeFolder() - defaultConfigFile := path.Join(homeFolder, defaultConfigDirName, defaultConfigFileName) - if _, err := os.Stat(defaultConfigFile); err == nil { - return defaultConfigFile - } - - // Read configuration file path from OCI_CONFIG_FILE env var - fallbackConfigFile, existed := os.LookupEnv(configFilePathEnvVarName) - if !existed { - return defaultConfigFile - } - if _, err := os.Stat(fallbackConfigFile); os.IsNotExist(err) { - return defaultConfigFile - } - return fallbackConfigFile -} diff --git a/command/agentproxyshared/helpers.go b/command/agentproxyshared/helpers.go deleted file mode 100644 index d1487174bdf1a0..00000000000000 --- a/command/agentproxyshared/helpers.go +++ /dev/null @@ -1,237 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
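As a quick orientation for the OCI auto-auth method deleted above, the following is a hedged sketch of the configuration keys its constructor parsed ("type", "role", and the optional "credential_poll_interval"), based only on the NewOCIAuthMethod code shown in this diff. The import paths reflect the agentproxyshared tree that this patch removes, the mount path and values are placeholders, and the call will return an error at runtime unless real OCI credentials are present.

package main

import (
	log "github.com/hashicorp/go-hclog"
	"github.com/hashicorp/vault/command/agentproxyshared/auth"
	"github.com/hashicorp/vault/command/agentproxyshared/auth/oci"
)

func main() {
	// Keys below are the ones read by the deleted NewOCIAuthMethod; values are examples.
	conf := &auth.AuthConfig{
		Logger:    log.Default(),
		MountPath: "auth/oci", // assumed mount path for the OCI auth backend
		Config: map[string]interface{}{
			"type":                     "apikey", // or "instance"
			"role":                     "my-role", // login path becomes <mount>/login/<role>
			"credential_poll_interval": "30s",     // optional; defaults to 60 seconds
		},
	}
	if _, err := oci.NewOCIAuthMethod(conf, "https://127.0.0.1:8200"); err != nil {
		log.Default().Error("oci auth method not configured", "error", err)
	}
}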
-// SPDX-License-Identifier: MPL-2.0 - -package agentproxyshared - -import ( - "context" - "errors" - "fmt" - "os" - "path/filepath" - "strings" - - log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - "github.com/hashicorp/vault/command/agentproxyshared/auth/alicloud" - "github.com/hashicorp/vault/command/agentproxyshared/auth/approle" - "github.com/hashicorp/vault/command/agentproxyshared/auth/aws" - "github.com/hashicorp/vault/command/agentproxyshared/auth/azure" - "github.com/hashicorp/vault/command/agentproxyshared/auth/cert" - "github.com/hashicorp/vault/command/agentproxyshared/auth/cf" - "github.com/hashicorp/vault/command/agentproxyshared/auth/gcp" - "github.com/hashicorp/vault/command/agentproxyshared/auth/jwt" - "github.com/hashicorp/vault/command/agentproxyshared/auth/kerberos" - "github.com/hashicorp/vault/command/agentproxyshared/auth/kubernetes" - "github.com/hashicorp/vault/command/agentproxyshared/auth/oci" - token_file "github.com/hashicorp/vault/command/agentproxyshared/auth/token-file" - "github.com/hashicorp/vault/command/agentproxyshared/cache" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cacheboltdb" - "github.com/hashicorp/vault/command/agentproxyshared/cache/cachememdb" - "github.com/hashicorp/vault/command/agentproxyshared/cache/keymanager" -) - -// GetAutoAuthMethodFromConfig Calls the appropriate NewAutoAuthMethod function, initializing -// the auto-auth method, based on the auto-auth method type. Returns an error if one happens or -// the method type is invalid. -func GetAutoAuthMethodFromConfig(autoAuthMethodType string, authConfig *auth.AuthConfig, vaultAddress string) (auth.AuthMethod, error) { - switch autoAuthMethodType { - case "alicloud": - return alicloud.NewAliCloudAuthMethod(authConfig) - case "aws": - return aws.NewAWSAuthMethod(authConfig) - case "azure": - return azure.NewAzureAuthMethod(authConfig) - case "cert": - return cert.NewCertAuthMethod(authConfig) - case "cf": - return cf.NewCFAuthMethod(authConfig) - case "gcp": - return gcp.NewGCPAuthMethod(authConfig) - case "jwt": - return jwt.NewJWTAuthMethod(authConfig) - case "kerberos": - return kerberos.NewKerberosAuthMethod(authConfig) - case "kubernetes": - return kubernetes.NewKubernetesAuthMethod(authConfig) - case "approle": - return approle.NewApproleAuthMethod(authConfig) - case "oci": - return oci.NewOCIAuthMethod(authConfig, vaultAddress) - case "token_file": - return token_file.NewTokenFileAuthMethod(authConfig) - case "pcf": // Deprecated. 
- return cf.NewCFAuthMethod(authConfig) - default: - return nil, errors.New(fmt.Sprintf("unknown auth method %q", autoAuthMethodType)) - } -} - -// PersistConfig contains configuration needed for persistent caching -type PersistConfig struct { - Type string - Path string `hcl:"path"` - KeepAfterImport bool `hcl:"keep_after_import"` - ExitOnErr bool `hcl:"exit_on_err"` - ServiceAccountTokenFile string `hcl:"service_account_token_file"` -} - -// AddPersistentStorageToLeaseCache adds persistence to a lease cache, based on a given PersistConfig -// Returns a close function to be deferred and the old token, if found, or an error -func AddPersistentStorageToLeaseCache(ctx context.Context, leaseCache *cache.LeaseCache, persistConfig *PersistConfig, logger log.Logger) (func() error, string, error) { - if persistConfig == nil { - return nil, "", errors.New("persist config was nil") - } - - if persistConfig.Path == "" { - return nil, "", errors.New("must specify persistent cache path") - } - - // Set AAD based on key protection type - var aad string - var err error - switch persistConfig.Type { - case "kubernetes": - aad, err = getServiceAccountJWT(persistConfig.ServiceAccountTokenFile) - if err != nil { - tokenFileName := persistConfig.ServiceAccountTokenFile - if len(tokenFileName) == 0 { - tokenFileName = "/var/run/secrets/kubernetes.io/serviceaccount/token" - } - return nil, "", fmt.Errorf("failed to read service account token from %s: %w", tokenFileName, err) - } - default: - return nil, "", fmt.Errorf("persistent key protection type %q not supported", persistConfig.Type) - } - - // Check if bolt file exists already - dbFileExists, err := cacheboltdb.DBFileExists(persistConfig.Path) - if err != nil { - return nil, "", fmt.Errorf("failed to check if bolt file exists at path %s: %w", persistConfig.Path, err) - } - if dbFileExists { - // Open the bolt file, but wait to setup Encryption - ps, err := cacheboltdb.NewBoltStorage(&cacheboltdb.BoltStorageConfig{ - Path: persistConfig.Path, - Logger: logger.Named("cacheboltdb"), - }) - if err != nil { - return nil, "", fmt.Errorf("error opening persistent cache %v", err) - } - - // Get the token from bolt for retrieving the encryption key, - // then setup encryption so that restore is possible - token, err := ps.GetRetrievalToken() - if err != nil { - return nil, "", fmt.Errorf("error getting retrieval token from persistent cache: %w", err) - } - - if err := ps.Close(); err != nil { - return nil, "", fmt.Errorf("failed to close persistent cache file after getting retrieval token: %w", err) - } - - km, err := keymanager.NewPassthroughKeyManager(ctx, token) - if err != nil { - return nil, "", fmt.Errorf("failed to configure persistence encryption for cache: %w", err) - } - - // Open the bolt file with the wrapper provided - ps, err = cacheboltdb.NewBoltStorage(&cacheboltdb.BoltStorageConfig{ - Path: persistConfig.Path, - Logger: logger.Named("cacheboltdb"), - Wrapper: km.Wrapper(), - AAD: aad, - }) - if err != nil { - return nil, "", fmt.Errorf("error opening persistent cache with wrapper: %w", err) - } - - // Restore anything in the persistent cache to the memory cache - if err := leaseCache.Restore(ctx, ps); err != nil { - logger.Error(fmt.Sprintf("error restoring in-memory cache from persisted file: %v", err)) - if persistConfig.ExitOnErr { - return nil, "", fmt.Errorf("exiting with error as exit_on_err is set to true") - } - } - logger.Info("loaded memcache from persistent storage") - - // Check for previous auto-auth token - oldTokenBytes, err := 
ps.GetAutoAuthToken(ctx) - if err != nil { - logger.Error(fmt.Sprintf("error in fetching previous auto-auth token: %v", err)) - if persistConfig.ExitOnErr { - return nil, "", fmt.Errorf("exiting with error as exit_on_err is set to true") - } - } - var previousToken string - if len(oldTokenBytes) > 0 { - oldToken, err := cachememdb.Deserialize(oldTokenBytes) - if err != nil { - logger.Error(fmt.Sprintf("error in deserializing previous auto-auth token cache entryn: %v", err)) - if persistConfig.ExitOnErr { - return nil, "", fmt.Errorf("exiting with error as exit_on_err is set to true") - } - } - previousToken = oldToken.Token - } - - // If keep_after_import true, set persistent storage layer in - // leaseCache, else remove db file - if persistConfig.KeepAfterImport { - leaseCache.SetPersistentStorage(ps) - return ps.Close, previousToken, nil - } else { - if err := ps.Close(); err != nil { - logger.Warn(fmt.Sprintf("failed to close persistent cache file: %s", err)) - } - dbFile := filepath.Join(persistConfig.Path, cacheboltdb.DatabaseFileName) - if err := os.Remove(dbFile); err != nil { - logger.Error(fmt.Sprintf("failed to remove persistent storage file %s: %v", dbFile, err)) - if persistConfig.ExitOnErr { - return nil, "", fmt.Errorf("exiting with error as exit_on_err is set to true") - } - } - return nil, previousToken, nil - } - } else { - km, err := keymanager.NewPassthroughKeyManager(ctx, nil) - if err != nil { - return nil, "", fmt.Errorf("failed to configure persistence encryption for cache: %w", err) - } - ps, err := cacheboltdb.NewBoltStorage(&cacheboltdb.BoltStorageConfig{ - Path: persistConfig.Path, - Logger: logger.Named("cacheboltdb"), - Wrapper: km.Wrapper(), - AAD: aad, - }) - if err != nil { - return nil, "", fmt.Errorf("error creating persistent cache: %w", err) - } - logger.Info("configured persistent storage", "path", persistConfig.Path) - - // Stash the key material in bolt - token, err := km.RetrievalToken(ctx) - if err != nil { - return nil, "", fmt.Errorf("error getting persistence key: %w", err) - } - if err := ps.StoreRetrievalToken(token); err != nil { - return nil, "", fmt.Errorf("error setting key in persistent cache: %w", err) - } - - leaseCache.SetPersistentStorage(ps) - return ps.Close, "", nil - } -} - -// getServiceAccountJWT attempts to read the service account JWT from the specified token file path. -// Defaults to using the Kubernetes default service account file path if token file path is empty. -func getServiceAccountJWT(tokenFile string) (string, error) { - if len(tokenFile) == 0 { - tokenFile = "/var/run/secrets/kubernetes.io/serviceaccount/token" - } - token, err := os.ReadFile(tokenFile) - if err != nil { - return "", err - } - return strings.TrimSpace(string(token)), nil -} diff --git a/command/agentproxyshared/helpers_test.go b/command/agentproxyshared/helpers_test.go deleted file mode 100644 index 24fdf1d9db5334..00000000000000 --- a/command/agentproxyshared/helpers_test.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
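For orientation, here is a minimal sketch of how the persistent-cache helper deleted above was wired up, based only on the PersistConfig fields and the AddPersistentStorageToLeaseCache signature shown in this diff. The import paths reflect the agentproxyshared tree that this patch removes, and the cache path is a placeholder.

package example

import (
	"context"

	log "github.com/hashicorp/go-hclog"
	"github.com/hashicorp/vault/command/agentproxyshared"
	"github.com/hashicorp/vault/command/agentproxyshared/cache"
)

func attachPersistence(ctx context.Context, lc *cache.LeaseCache, logger log.Logger) error {
	// Field names match the PersistConfig struct removed above; "kubernetes" is the
	// only key-protection type that helper supported.
	persist := &agentproxyshared.PersistConfig{
		Type:                    "kubernetes",
		Path:                    "/vault/agent-cache", // placeholder directory for the bolt file
		KeepAfterImport:         true,
		ExitOnErr:               true,
		ServiceAccountTokenFile: "", // empty falls back to the default service account token path
	}

	closeFn, previousToken, err := agentproxyshared.AddPersistentStorageToLeaseCache(ctx, lc, persist, logger)
	if err != nil {
		return err
	}
	if closeFn != nil {
		defer closeFn()
	}
	_ = previousToken // auto-auth token restored from the previous run, if any
	return nil
}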
-// SPDX-License-Identifier: MPL-2.0 - -package agentproxyshared - -import ( - "context" - "os" - "testing" - - hclog "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared/cache" - "github.com/hashicorp/vault/sdk/helper/logging" -) - -func testNewLeaseCache(t *testing.T, responses []*cache.SendResponse) *cache.LeaseCache { - t.Helper() - - client, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - lc, err := cache.NewLeaseCache(&cache.LeaseCacheConfig{ - Client: client, - BaseContext: context.Background(), - Proxier: cache.NewMockProxier(responses), - Logger: logging.NewVaultLogger(hclog.Trace).Named("cache.leasecache"), - }) - if err != nil { - t.Fatal(err) - } - return lc -} - -func populateTempFile(t *testing.T, name, contents string) *os.File { - t.Helper() - - file, err := os.CreateTemp(t.TempDir(), name) - if err != nil { - t.Fatal(err) - } - - _, err = file.WriteString(contents) - if err != nil { - t.Fatal(err) - } - - err = file.Close() - if err != nil { - t.Fatal(err) - } - - return file -} - -// Test_AddPersistentStorageToLeaseCache Tests that AddPersistentStorageToLeaseCache() correctly -// adds persistent storage to a lease cache -func Test_AddPersistentStorageToLeaseCache(t *testing.T) { - tempDir := t.TempDir() - serviceAccountTokenFile := populateTempFile(t, "proxy-config.hcl", "token") - - persistConfig := &PersistConfig{ - Type: "kubernetes", - Path: tempDir, - KeepAfterImport: false, - ExitOnErr: false, - ServiceAccountTokenFile: serviceAccountTokenFile.Name(), - } - - leaseCache := testNewLeaseCache(t, nil) - if leaseCache.PersistentStorage() != nil { - t.Fatal("persistent storage was available before ours was added") - } - - deferFunc, token, err := AddPersistentStorageToLeaseCache(context.Background(), leaseCache, persistConfig, logging.NewVaultLogger(hclog.Info)) - if err != nil { - t.Fatal(err) - } - - if leaseCache.PersistentStorage() == nil { - t.Fatal("persistent storage was not added") - } - - if token != "" { - t.Fatal("expected token to be empty") - } - - if deferFunc == nil { - t.Fatal("expected deferFunc to not be nil") - } -} diff --git a/command/approle_concurrency_integ_test.go b/command/approle_concurrency_integ_test.go index 934f8b33fd455e..5dbcce064c8ddc 100644 --- a/command/approle_concurrency_integ_test.go +++ b/command/approle_concurrency_integ_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit.go b/command/audit.go index 606de73eef8104..8acea78f36c495 100644 --- a/command/audit.go +++ b/command/audit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit_disable.go b/command/audit_disable.go index ef9288b0859c29..ddebfcbeda1b10 100644 --- a/command/audit_disable.go +++ b/command/audit_disable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit_disable_test.go b/command/audit_disable_test.go index 44b782f4dbe6bd..0a7e8e4dcd99e2 100644 --- a/command/audit_disable_test.go +++ b/command/audit_disable_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit_enable.go b/command/audit_enable.go index 652c3c27efefc6..9ed7d5d30694af 100644 --- a/command/audit_enable.go +++ b/command/audit_enable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit_enable_test.go b/command/audit_enable_test.go index e7dc4ae786044e..7d19f086ad5867 100644 --- a/command/audit_enable_test.go +++ b/command/audit_enable_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit_list.go b/command/audit_list.go index e5af8525eef3ec..a2dde2180a844f 100644 --- a/command/audit_list.go +++ b/command/audit_list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/audit_list_test.go b/command/audit_list_test.go index c2e6eacf47b636..9cbb0af5eee312 100644 --- a/command/audit_list_test.go +++ b/command/audit_list_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth.go b/command/auth.go index e2bdb81c1ea2e8..3c47b2b889c46d 100644 --- a/command/auth.go +++ b/command/auth.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_disable.go b/command/auth_disable.go index 735103bdab6010..773486107a5135 100644 --- a/command/auth_disable.go +++ b/command/auth_disable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_disable_test.go b/command/auth_disable_test.go index 385bc4ec73ac9e..51419b86637a9e 100644 --- a/command/auth_disable_test.go +++ b/command/auth_disable_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_enable.go b/command/auth_enable.go index 7c7af550dbadf2..2970da18e14cec 100644 --- a/command/auth_enable.go +++ b/command/auth_enable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_enable_test.go b/command/auth_enable_test.go index 4a4292ce80b5b2..e124ac1c0c6dd6 100644 --- a/command/auth_enable_test.go +++ b/command/auth_enable_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_help.go b/command/auth_help.go index 34b6b9ffa930ba..41ea7be5f5d11b 100644 --- a/command/auth_help.go +++ b/command/auth_help.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_help_test.go b/command/auth_help_test.go index a83695ee3e8f7c..0c0d36f168f2ea 100644 --- a/command/auth_help_test.go +++ b/command/auth_help_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_list.go b/command/auth_list.go index 25103a14dc5acd..f8b53d1518f304 100644 --- a/command/auth_list.go +++ b/command/auth_list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_list_test.go b/command/auth_list_test.go index 2e96f9f2ca0786..decf6e9b06f0a8 100644 --- a/command/auth_list_test.go +++ b/command/auth_list_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_move.go b/command/auth_move.go index 2af5ab65130a15..9e591ba64f0de1 100644 --- a/command/auth_move.go +++ b/command/auth_move.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_move_test.go b/command/auth_move_test.go index 877afd27bfee58..035938efe5aa54 100644 --- a/command/auth_move_test.go +++ b/command/auth_move_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_test.go b/command/auth_test.go index dd8abb07f7e62c..f0fd5d065d8bc9 100644 --- a/command/auth_test.go +++ b/command/auth_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_tune.go b/command/auth_tune.go index a7a09797f90677..6e3d3e7bce8f63 100644 --- a/command/auth_tune.go +++ b/command/auth_tune.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/auth_tune_test.go b/command/auth_tune_test.go index aabcd8396020ea..f4a67f3de59495 100644 --- a/command/auth_tune_test.go +++ b/command/auth_tune_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base.go b/command/base.go index 641fcb34e94589..1ccad3b2eb3a8e 100644 --- a/command/base.go +++ b/command/base.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -42,20 +39,20 @@ type BaseCommand struct { flags *FlagSets flagsOnce sync.Once - flagAddress string - flagAgentProxyAddress string - flagCACert string - flagCAPath string - flagClientCert string - flagClientKey string - flagNamespace string - flagNS string - flagPolicyOverride bool - flagTLSServerName string - flagTLSSkipVerify bool - flagDisableRedirects bool - flagWrapTTL time.Duration - flagUnlockKey string + flagAddress string + flagAgentAddress string + flagCACert string + flagCAPath string + flagClientCert string + flagClientKey string + flagNamespace string + flagNS string + flagPolicyOverride bool + flagTLSServerName string + flagTLSSkipVerify bool + flagDisableRedirects bool + flagWrapTTL time.Duration + flagUnlockKey string flagFormat string flagField string @@ -90,8 +87,8 @@ func (c *BaseCommand) Client() (*api.Client, error) { if c.flagAddress != "" { config.Address = c.flagAddress } - if c.flagAgentProxyAddress != "" { - config.Address = c.flagAgentProxyAddress + if c.flagAgentAddress != "" { + config.Address = c.flagAgentAddress } if c.flagOutputCurlString { @@ -330,7 +327,7 @@ func (c *BaseCommand) flagSet(bit FlagSetBit) *FlagSets { agentAddrStringVar := &StringVar{ Name: "agent-address", - Target: &c.flagAgentProxyAddress, + Target: &c.flagAgentAddress, EnvVar: api.EnvVaultAgentAddr, Completion: complete.PredictAnything, Usage: "Address of the Agent.", diff --git a/command/base_flags.go b/command/base_flags.go index 3fe069fbb457a6..652ac177c6853e 100644 --- a/command/base_flags.go +++ b/command/base_flags.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base_flags_test.go b/command/base_flags_test.go index 580e163de1f938..39777d24a95370 100644 --- a/command/base_flags_test.go +++ b/command/base_flags_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base_helpers.go b/command/base_helpers.go index 2595dc56a6a55c..60ad8af20cb1a5 100644 --- a/command/base_helpers.go +++ b/command/base_helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base_helpers_test.go b/command/base_helpers_test.go index 50cd26441ffa70..dee93b4bf44a17 100644 --- a/command/base_helpers_test.go +++ b/command/base_helpers_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base_predict.go b/command/base_predict.go index ee2a771c7a968a..ebed8132587566 100644 --- a/command/base_predict.go +++ b/command/base_predict.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base_predict_test.go b/command/base_predict_test.go index 20af0f68810f93..5b96c8d76c2972 100644 --- a/command/base_predict_test.go +++ b/command/base_predict_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/base_test.go b/command/base_test.go index af4f0a4d3b62a6..b3f75e0eb05d77 100644 --- a/command/base_test.go +++ b/command/base_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/command_test.go b/command/command_test.go index f1a5269b9f5fb8..34ef52495ceeae 100644 --- a/command/command_test.go +++ b/command/command_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -71,50 +68,6 @@ func testVaultServer(tb testing.TB) (*api.Client, func()) { return client, closer } -func testVaultServerWithSecrets(ctx context.Context, tb testing.TB) (*api.Client, func()) { - tb.Helper() - - client, _, closer := testVaultServerUnseal(tb) - - // enable kv-v1 backend - if err := client.Sys().Mount("kv-v1/", &api.MountInput{ - Type: "kv-v1", - }); err != nil { - tb.Fatal(err) - } - - // enable kv-v2 backend - if err := client.Sys().Mount("kv-v2/", &api.MountInput{ - Type: "kv-v2", - }); err != nil { - tb.Fatal(err) - } - - // populate dummy secrets - for _, path := range []string{ - "foo", - "app-1/foo", - "app-1/bar", - "app-1/nested/baz", - } { - if err := client.KVv1("kv-v1").Put(ctx, path, map[string]interface{}{ - "user": "test", - "password": "Hashi123", - }); err != nil { - tb.Fatal(err) - } - - if _, err := client.KVv2("kv-v2").Put(ctx, path, map[string]interface{}{ - "user": "test", - "password": "Hashi123", - }); err != nil { - tb.Fatal(err) - } - } - - return client, closer -} - func testVaultServerWithKVVersion(tb testing.TB, kvVersion string) (*api.Client, func()) { tb.Helper() @@ -140,7 +93,7 @@ func testVaultServerAllBackends(tb testing.TB) (*api.Client, func()) { // testVaultServerAutoUnseal creates a test vault cluster and sets it up with auto unseal // the function returns a client, the recovery keys, and a closer function func testVaultServerAutoUnseal(tb testing.TB) (*api.Client, []string, func()) { - testSeal, _ := seal.NewTestSeal(nil) + testSeal := seal.NewTestSeal(nil) autoSeal, err := vault.NewAutoSeal(testSeal) if err != nil { tb.Fatal("unable to create autoseal", err) diff --git a/command/commands.go b/command/commands.go index 68e2542b06db2e..316247d1f644fc 100644 --- a/command/commands.go +++ b/command/commands.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -97,6 +94,10 @@ const ( // system being developed over multiple release cycles. EnvVaultExperiments = "VAULT_EXPERIMENTS" + // DisableSSCTokens is an env var used to disable index bearing + // token functionality + DisableSSCTokens = "VAULT_DISABLE_SERVER_SIDE_CONSISTENT_TOKENS" + // flagNameAddress is the flag used in the base command to read in the // address of the Vault server. 
flagNameAddress = "address" @@ -264,11 +265,6 @@ func initCommands(ui, serverCmdUi cli.Ui, runOpts *RunOptions) map[string]cli.Co SighupCh: MakeSighupCh(), }, nil }, - "agent generate-config": func() (cli.Command, error) { - return &AgentGenerateConfigCommand{ - BaseCommand: getBaseCommand(), - }, nil - }, "audit": func() (cli.Command, error) { return &AuditCommand{ BaseCommand: getBaseCommand(), @@ -604,15 +600,6 @@ func initCommands(ui, serverCmdUi cli.Ui, runOpts *RunOptions) map[string]cli.Co BaseCommand: getBaseCommand(), }, nil }, - "proxy": func() (cli.Command, error) { - return &ProxyCommand{ - BaseCommand: &BaseCommand{ - UI: serverCmdUi, - }, - ShutdownCh: MakeShutdownCh(), - SighupCh: MakeSighupCh(), - }, nil - }, "policy": func() (cli.Command, error) { return &PolicyCommand{ BaseCommand: getBaseCommand(), diff --git a/command/commands_nonwindows.go b/command/commands_nonwindows.go index f8d128c3fd4254..c94c485f490751 100644 --- a/command/commands_nonwindows.go +++ b/command/commands_nonwindows.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !windows package command diff --git a/command/commands_windows.go b/command/commands_windows.go index 541a6e4aea8ecb..ed06a07406f63d 100644 --- a/command/commands_windows.go +++ b/command/commands_windows.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build windows package command diff --git a/command/config.go b/command/config.go index 3fbc53a9667074..b46581fc80b70f 100644 --- a/command/config.go +++ b/command/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/config/config.go b/command/config/config.go index 71f9127887d7ac..ef0c4adf6dcd83 100644 --- a/command/config/config.go +++ b/command/config/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package config import ( diff --git a/command/config/config_test.go b/command/config/config_test.go index fef151622259a5..b5d41361e9b711 100644 --- a/command/config/config_test.go +++ b/command/config/config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package config import ( diff --git a/command/config/util.go b/command/config/util.go index f295f462ae503c..1ac47df7e05333 100644 --- a/command/config/util.go +++ b/command/config/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package config import ( diff --git a/command/config/validate_listener.go b/command/config/validate_listener.go index e2d27166a04db8..7a56ec69985087 100644 --- a/command/config/validate_listener.go +++ b/command/config/validate_listener.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !fips_140_3 package config diff --git a/command/config_test.go b/command/config_test.go index 787c6795765a03..0ed34992f3a4c2 100644 --- a/command/config_test.go +++ b/command/config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/debug.go b/command/debug.go index e5440b3b88881b..1ea27690171d6e 100644 --- a/command/debug.go +++ b/command/debug.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/debug_test.go b/command/debug_test.go index e63e1ebed0585e..de51c770f5acbf 100644 --- a/command/debug_test.go +++ b/command/debug_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/delete.go b/command/delete.go index 7da6dd2d984795..78d786f11756c3 100644 --- a/command/delete.go +++ b/command/delete.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/delete_test.go b/command/delete_test.go index 629be7abb42c25..e26d393b16fea0 100644 --- a/command/delete_test.go +++ b/command/delete_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/events.go b/command/events.go index 353c97947d96d3..50884803e5b9ec 100644 --- a/command/events.go +++ b/command/events.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/events_test.go b/command/events_test.go index bb2aef0b37baa1..7d7527c7e082c9 100644 --- a/command/events_test.go +++ b/command/events_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/format.go b/command/format.go index 5e42d31d1f9ab9..cdacaab391bbf2 100644 --- a/command/format.go +++ b/command/format.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/format_test.go b/command/format_test.go index 2bdc45ebe57ae7..48d164591b1b3b 100644 --- a/command/format_test.go +++ b/command/format_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/healthcheck/healthcheck.go b/command/healthcheck/healthcheck.go index a6fb2040eedbc0..2ce9c2deea5db5 100644 --- a/command/healthcheck/healthcheck.go +++ b/command/healthcheck/healthcheck.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - /* * The healthcheck package attempts to allow generic checks of arbitrary * engines, while providing a common framework with some performance diff --git a/command/healthcheck/pki.go b/command/healthcheck/pki.go index 42f4fc485865f7..406163b355cb50 100644 --- a/command/healthcheck/pki.go +++ b/command/healthcheck/pki.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_allow_acme_headers.go b/command/healthcheck/pki_allow_acme_headers.go deleted file mode 100644 index 2015ac276c02d4..00000000000000 --- a/command/healthcheck/pki_allow_acme_headers.go +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package healthcheck - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-secure-stdlib/parseutil" - "github.com/hashicorp/go-secure-stdlib/strutil" - "github.com/hashicorp/vault/sdk/logical" -) - -type AllowAcmeHeaders struct { - Enabled bool - UnsupportedVersion bool - - TuneFetcher *PathFetch - TuneData map[string]interface{} - - AcmeConfigFetcher *PathFetch -} - -func NewAllowAcmeHeaders() Check { - return &AllowAcmeHeaders{} -} - -func (h *AllowAcmeHeaders) Name() string { - return "allow_acme_headers" -} - -func (h *AllowAcmeHeaders) IsEnabled() bool { - return h.Enabled -} - -func (h *AllowAcmeHeaders) DefaultConfig() map[string]interface{} { - return map[string]interface{}{} -} - -func (h *AllowAcmeHeaders) LoadConfig(config map[string]interface{}) error { - enabled, err := parseutil.ParseBool(config["enabled"]) - if err != nil { - return fmt.Errorf("error parsing %v.enabled: %w", h.Name(), err) - } - h.Enabled = enabled - - return nil -} - -func (h *AllowAcmeHeaders) FetchResources(e *Executor) error { - var err error - h.AcmeConfigFetcher, err = e.FetchIfNotFetched(logical.ReadOperation, "/{{mount}}/config/acme") - if err != nil { - return err - } - - if h.AcmeConfigFetcher.IsUnsupportedPathError() { - h.UnsupportedVersion = true - } - - _, h.TuneFetcher, h.TuneData, err = fetchMountTune(e, func() { - h.UnsupportedVersion = true - }) - if err != nil { - return err - } - - return nil -} - -func (h *AllowAcmeHeaders) Evaluate(e *Executor) ([]*Result, error) { - if h.UnsupportedVersion { - ret := Result{ - Status: ResultInvalidVersion, - Endpoint: h.AcmeConfigFetcher.Path, - Message: "This health check requires Vault 1.14+ but an earlier version of Vault Server was contacted, preventing this health check from running.", - } - return []*Result{&ret}, nil - } - - if h.AcmeConfigFetcher.IsSecretPermissionsError() { - msg := "Without read access to ACME configuration, this health check is unable to function." - return craftInsufficientPermissionResult(e, h.AcmeConfigFetcher.Path, msg), nil - } - - acmeEnabled, err := isAcmeEnabled(h.AcmeConfigFetcher) - if err != nil { - return nil, err - } - - if !acmeEnabled { - ret := Result{ - Status: ResultNotApplicable, - Endpoint: h.AcmeConfigFetcher.Path, - Message: "ACME is not enabled, no additional response headers required.", - } - return []*Result{&ret}, nil - } - - if h.TuneFetcher.IsSecretPermissionsError() { - msg := "Without access to mount tune information, this health check is unable to function." 
- return craftInsufficientPermissionResult(e, h.TuneFetcher.Path, msg), nil - } - - resp, err := StringList(h.TuneData["allowed_response_headers"]) - if err != nil { - return nil, fmt.Errorf("unable to parse value from server for allowed_response_headers: %w", err) - } - - requiredResponseHeaders := []string{"Replay-Nonce", "Link", "Location"} - foundResponseHeaders := []string{} - for _, param := range resp { - for _, reqHeader := range requiredResponseHeaders { - if strings.EqualFold(param, reqHeader) { - foundResponseHeaders = append(foundResponseHeaders, reqHeader) - break - } - } - } - - foundAllHeaders := strutil.EquivalentSlices(requiredResponseHeaders, foundResponseHeaders) - - if !foundAllHeaders { - ret := Result{ - Status: ResultWarning, - Endpoint: "/sys/mounts/{{mount}}/tune", - Message: "Mount hasn't enabled 'Replay-Nonce', 'Link', 'Location' response headers, these are required for ACME to function.", - } - return []*Result{&ret}, nil - } - - ret := Result{ - Status: ResultOK, - Endpoint: "/sys/mounts/{{mount}}/tune", - Message: "Mount has enabled 'Replay-Nonce', 'Link', 'Location' response headers.", - } - return []*Result{&ret}, nil -} - -func craftInsufficientPermissionResult(e *Executor, path, errorMsg string) []*Result { - ret := Result{ - Status: ResultInsufficientPermissions, - Endpoint: path, - Message: errorMsg, - } - - if e.Client.Token() == "" { - ret.Message = "No token available so unable read the tune endpoint for this mount. " + ret.Message - } else { - ret.Message = "This token lacks permission to read the tune endpoint for this mount. " + ret.Message - } - - return []*Result{&ret} -} diff --git a/command/healthcheck/pki_allow_if_modified_since.go b/command/healthcheck/pki_allow_if_modified_since.go index bb5306e054188d..1cff1cda5d5823 100644 --- a/command/healthcheck/pki_allow_if_modified_since.go +++ b/command/healthcheck/pki_allow_if_modified_since.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_audit_visibility.go b/command/healthcheck/pki_audit_visibility.go index 6b3834fb0c9e76..1984fb97d7ad45 100644 --- a/command/healthcheck/pki_audit_visibility.go +++ b/command/healthcheck/pki_audit_visibility.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_ca_validity_period.go b/command/healthcheck/pki_ca_validity_period.go index 511de757061d85..c971aed5220892 100644 --- a/command/healthcheck/pki_ca_validity_period.go +++ b/command/healthcheck/pki_ca_validity_period.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_crl_validity_period.go b/command/healthcheck/pki_crl_validity_period.go index 8450a058f994de..bd8e856baa20e3 100644 --- a/command/healthcheck/pki_crl_validity_period.go +++ b/command/healthcheck/pki_crl_validity_period.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_enable_acme_issuance.go b/command/healthcheck/pki_enable_acme_issuance.go deleted file mode 100644 index 986165d0e253eb..00000000000000 --- a/command/healthcheck/pki_enable_acme_issuance.go +++ /dev/null @@ -1,237 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package healthcheck - -import ( - "bytes" - "context" - "crypto/tls" - "fmt" - "net/http" - "net/url" - - "github.com/hashicorp/go-secure-stdlib/parseutil" - "github.com/hashicorp/vault/sdk/logical" - - "golang.org/x/crypto/acme" -) - -type EnableAcmeIssuance struct { - Enabled bool - UnsupportedVersion bool - - AcmeConfigFetcher *PathFetch - ClusterConfigFetcher *PathFetch - TotalIssuers int - RootIssuers int -} - -func NewEnableAcmeIssuance() Check { - return &EnableAcmeIssuance{} -} - -func (h *EnableAcmeIssuance) Name() string { - return "enable_acme_issuance" -} - -func (h *EnableAcmeIssuance) IsEnabled() bool { - return h.Enabled -} - -func (h *EnableAcmeIssuance) DefaultConfig() map[string]interface{} { - return map[string]interface{}{} -} - -func (h *EnableAcmeIssuance) LoadConfig(config map[string]interface{}) error { - enabled, err := parseutil.ParseBool(config["enabled"]) - if err != nil { - return fmt.Errorf("error parsing %v.enabled: %w", h.Name(), err) - } - h.Enabled = enabled - - return nil -} - -func (h *EnableAcmeIssuance) FetchResources(e *Executor) error { - var err error - h.AcmeConfigFetcher, err = e.FetchIfNotFetched(logical.ReadOperation, "/{{mount}}/config/acme") - if err != nil { - return err - } - - if h.AcmeConfigFetcher.IsUnsupportedPathError() { - h.UnsupportedVersion = true - } - - h.ClusterConfigFetcher, err = e.FetchIfNotFetched(logical.ReadOperation, "/{{mount}}/config/cluster") - if err != nil { - return err - } - - if h.ClusterConfigFetcher.IsUnsupportedPathError() { - h.UnsupportedVersion = true - } - - h.TotalIssuers, h.RootIssuers, err = doesMountContainOnlyRootIssuers(e) - - return nil -} - -func doesMountContainOnlyRootIssuers(e *Executor) (int, int, error) { - exit, _, issuers, err := pkiFetchIssuersList(e, func() {}) - if exit || err != nil { - return 0, 0, err - } - - totalIssuers := 0 - rootIssuers := 0 - - for _, issuer := range issuers { - skip, _, cert, err := pkiFetchIssuer(e, issuer, func() {}) - - if skip || err != nil { - if err != nil { - return 0, 0, err - } - continue - } - totalIssuers++ - - if !bytes.Equal(cert.RawSubject, cert.RawIssuer) { - continue - } - if err := cert.CheckSignatureFrom(cert); err != nil { - continue - } - rootIssuers++ - } - - return totalIssuers, rootIssuers, nil -} - -func isAcmeEnabled(fetcher *PathFetch) (bool, error) { - isEnabledRaw, ok := fetcher.Secret.Data["enabled"] - if !ok { - return false, fmt.Errorf("enabled configuration field missing from acme config") - } - - parseBool, err := parseutil.ParseBool(isEnabledRaw) - if err != nil { - return false, fmt.Errorf("failed parsing 'enabled' field from ACME config: %w", err) - } - - return parseBool, nil -} - -func verifyLocalPathUrl(h *EnableAcmeIssuance) error { - localPathRaw, ok := h.ClusterConfigFetcher.Secret.Data["path"] - if !ok { - return fmt.Errorf("'path' field missing from config") - } - - localPath, err := parseutil.ParseString(localPathRaw) - if err != nil { - return fmt.Errorf("failed converting 'path' field from local config: %w", err) - } - - if localPath == "" { - return fmt.Errorf("'path' field not configured within /{{mount}}/config/cluster") - } - - parsedUrl, err := url.Parse(localPath) - if err != nil { - return fmt.Errorf("failed to parse URL from path config: %v: %w", localPathRaw, err) - } - - if parsedUrl.Scheme != "https" { - return fmt.Errorf("the configured 'path' field in /{{mount}}/config/cluster was not using an https scheme") - } - - // Avoid issues with SSL certificates for this 
check, we just want to validate that we would - // hit an ACME server with the path they specified in configuration - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - } - client := &http.Client{Transport: tr} - acmeDirectoryUrl := parsedUrl.JoinPath("/acme/", "directory") - acmeClient := acme.Client{HTTPClient: client, DirectoryURL: acmeDirectoryUrl.String()} - _, err = acmeClient.Discover(context.Background()) - if err != nil { - return fmt.Errorf("using configured 'path' field ('%s') in /{{mount}}/config/cluster failed to reach the ACME"+ - " directory: %s: %w", parsedUrl.String(), acmeDirectoryUrl.String(), err) - } - - return nil -} - -func (h *EnableAcmeIssuance) Evaluate(e *Executor) (results []*Result, err error) { - if h.UnsupportedVersion { - ret := Result{ - Status: ResultInvalidVersion, - Endpoint: h.AcmeConfigFetcher.Path, - Message: "This health check requires Vault 1.14+ but an earlier version of Vault Server was contacted, preventing this health check from running.", - } - return []*Result{&ret}, nil - } - - if h.AcmeConfigFetcher.IsSecretPermissionsError() { - msg := "Without this information, this health check is unable to function." - return craftInsufficientPermissionResult(e, h.AcmeConfigFetcher.Path, msg), nil - } - - acmeEnabled, err := isAcmeEnabled(h.AcmeConfigFetcher) - if err != nil { - return nil, err - } - - if !acmeEnabled { - if h.TotalIssuers == 0 { - ret := Result{ - Status: ResultNotApplicable, - Endpoint: h.AcmeConfigFetcher.Path, - Message: "No issuers in mount, ACME is not required.", - } - return []*Result{&ret}, nil - } - - if h.TotalIssuers == h.RootIssuers { - ret := Result{ - Status: ResultNotApplicable, - Endpoint: h.AcmeConfigFetcher.Path, - Message: "Mount contains only root issuers, ACME is not required.", - } - return []*Result{&ret}, nil - } - - ret := Result{ - Status: ResultInformational, - Endpoint: h.AcmeConfigFetcher.Path, - Message: "Consider enabling ACME support to support a self-rotating PKI infrastructure.", - } - return []*Result{&ret}, nil - } - - if h.ClusterConfigFetcher.IsSecretPermissionsError() { - msg := "Without this information, this health check is unable to function." - return craftInsufficientPermissionResult(e, h.ClusterConfigFetcher.Path, msg), nil - } - - localPathIssue := verifyLocalPathUrl(h) - - if localPathIssue != nil { - ret := Result{ - Status: ResultWarning, - Endpoint: h.ClusterConfigFetcher.Path, - Message: "ACME enabled in config but not functional: " + localPathIssue.Error(), - } - return []*Result{&ret}, nil - } - - ret := Result{ - Status: ResultOK, - Endpoint: h.ClusterConfigFetcher.Path, - Message: "ACME enabled and successfully connected to the ACME directory.", - } - return []*Result{&ret}, nil -} diff --git a/command/healthcheck/pki_enable_auto_tidy.go b/command/healthcheck/pki_enable_auto_tidy.go index 1734d1adcbf15c..dca555fd0326cb 100644 --- a/command/healthcheck/pki_enable_auto_tidy.go +++ b/command/healthcheck/pki_enable_auto_tidy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_hardware_backed_root.go b/command/healthcheck/pki_hardware_backed_root.go index 2fdda6e4a62fb6..89d3550eaf9b43 100644 --- a/command/healthcheck/pki_hardware_backed_root.go +++ b/command/healthcheck/pki_hardware_backed_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_role_allows_glob_wildcards.go b/command/healthcheck/pki_role_allows_glob_wildcards.go index 83c55c23856d50..34fb09927ce5aa 100644 --- a/command/healthcheck/pki_role_allows_glob_wildcards.go +++ b/command/healthcheck/pki_role_allows_glob_wildcards.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_role_allows_localhost.go b/command/healthcheck/pki_role_allows_localhost.go index 0c9b780abef788..568aa3a5f85789 100644 --- a/command/healthcheck/pki_role_allows_localhost.go +++ b/command/healthcheck/pki_role_allows_localhost.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_role_no_store_false.go b/command/healthcheck/pki_role_no_store_false.go index 882955e2bd5713..4fa7ba5ac68d5a 100644 --- a/command/healthcheck/pki_role_no_store_false.go +++ b/command/healthcheck/pki_role_no_store_false.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_root_issued_leaves.go b/command/healthcheck/pki_root_issued_leaves.go index 615684b0df494c..e858794b621c63 100644 --- a/command/healthcheck/pki_root_issued_leaves.go +++ b/command/healthcheck/pki_root_issued_leaves.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_tidy_last_run.go b/command/healthcheck/pki_tidy_last_run.go index 9d07369c877d89..6fed74d33964c5 100644 --- a/command/healthcheck/pki_tidy_last_run.go +++ b/command/healthcheck/pki_tidy_last_run.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/pki_too_many_certs.go b/command/healthcheck/pki_too_many_certs.go index 59722ab2eff016..6b07b5dfe38ce4 100644 --- a/command/healthcheck/pki_too_many_certs.go +++ b/command/healthcheck/pki_too_many_certs.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/shared.go b/command/healthcheck/shared.go index 4097704da2c494..9f2b05051766d9 100644 --- a/command/healthcheck/shared.go +++ b/command/healthcheck/shared.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/healthcheck/util.go b/command/healthcheck/util.go index dd5d66e88573df..632fe1a286a354 100644 --- a/command/healthcheck/util.go +++ b/command/healthcheck/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package healthcheck import ( diff --git a/command/kv.go b/command/kv.go index b0834c78c71dcb..2172576dbd6fa0 100644 --- a/command/kv.go +++ b/command/kv.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/kv_delete.go b/command/kv_delete.go index a365c891630826..5555b9c71a8953 100644 --- a/command/kv_delete.go +++ b/command/kv_delete.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -156,7 +153,7 @@ func (c *KVDeleteCommand) Run(args []string) int { var fullPath string if v2 { secret, err = c.deleteV2(partialPath, mountPath, client) - fullPath = addPrefixToKVPath(partialPath, mountPath, "data", false) + fullPath = addPrefixToKVPath(partialPath, mountPath, "data") } else { // v1 if mountFlagSyntax { @@ -195,13 +192,13 @@ func (c *KVDeleteCommand) deleteV2(path, mountPath string, client *api.Client) ( var secret *api.Secret switch { case len(c.flagVersions) > 0: - path = addPrefixToKVPath(path, mountPath, "delete", false) + path = addPrefixToKVPath(path, mountPath, "delete") data := map[string]interface{}{ "versions": kvParseVersionsFlags(c.flagVersions), } secret, err = client.Logical().Write(path, data) default: - path = addPrefixToKVPath(path, mountPath, "data", false) + path = addPrefixToKVPath(path, mountPath, "data") secret, err = client.Logical().Delete(path) } diff --git a/command/kv_destroy.go b/command/kv_destroy.go index 1167ec814838e7..45cbca02518bcc 100644 --- a/command/kv_destroy.go +++ b/command/kv_destroy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -155,7 +152,7 @@ func (c *KVDestroyCommand) Run(args []string) int { c.UI.Error("Destroy not supported on KV Version 1") return 1 } - destroyPath := addPrefixToKVPath(partialPath, mountPath, "destroy", false) + destroyPath := addPrefixToKVPath(partialPath, mountPath, "destroy") if err != nil { c.UI.Error(err.Error()) return 2 diff --git a/command/kv_enable_versioning.go b/command/kv_enable_versioning.go index 8282dd296ff507..9c2a601432847d 100644 --- a/command/kv_enable_versioning.go +++ b/command/kv_enable_versioning.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/kv_get.go b/command/kv_get.go index 1d0e330e71c49c..057a787c4e286e 100644 --- a/command/kv_get.go +++ b/command/kv_get.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -152,7 +149,7 @@ func (c *KVGetCommand) Run(args []string) int { var fullPath string // Add /data to v2 paths only if v2 { - fullPath = addPrefixToKVPath(partialPath, mountPath, "data", false) + fullPath = addPrefixToKVPath(partialPath, mountPath, "data") if c.flagVersion > 0 { versionParam = map[string]string{ diff --git a/command/kv_helpers.go b/command/kv_helpers.go index adf2ec3fdcedc2..b362c3bb071360 100644 --- a/command/kv_helpers.go +++ b/command/kv_helpers.go @@ -1,15 +1,10 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( - "context" "errors" "fmt" "io" - paths "path" - "sort" + "path" "strings" "github.com/hashicorp/go-secure-stdlib/strutil" @@ -80,9 +75,7 @@ func kvPreflightVersionRequest(client *api.Client, path string) (string, int, er err = fmt.Errorf( `This output flag requires the success of a preflight request to determine the version of a KV secrets engine. Please -re-run this command with a token with read access to %s. 
-Note that if the path you are trying to reach is a KV v2 path, your token's policy must -allow read access to that path in the format 'mount-path/data/foo', not just 'mount-path/foo'.`, path) +re-run this command with a token with read access to %s`, path) } } @@ -128,15 +121,15 @@ func isKVv2(path string, client *api.Client) (string, bool, error) { return mountPath, version == 2, nil } -func addPrefixToKVPath(path, mountPath, apiPrefix string, skipIfExists bool) string { - if path == mountPath || path == strings.TrimSuffix(mountPath, "/") { - return paths.Join(mountPath, apiPrefix) +func addPrefixToKVPath(p, mountPath, apiPrefix string) string { + if p == mountPath || p == strings.TrimSuffix(mountPath, "/") { + return path.Join(mountPath, apiPrefix) } - pathSuffix := strings.TrimPrefix(path, mountPath) + tp := strings.TrimPrefix(p, mountPath) for { // If the entire mountPath is included in the path, we are done - if pathSuffix != path { + if tp != p { break } // Trim the parts of the mountPath that are not included in the @@ -147,16 +140,10 @@ func addPrefixToKVPath(path, mountPath, apiPrefix string, skipIfExists bool) str break } mountPath = strings.TrimSuffix(partialMountPath[1], "/") - pathSuffix = strings.TrimPrefix(pathSuffix, mountPath) - } - - if skipIfExists { - if strings.HasPrefix(pathSuffix, apiPrefix) || strings.HasPrefix(pathSuffix, "/"+apiPrefix) { - return paths.Join(mountPath, pathSuffix) - } + tp = strings.TrimPrefix(tp, mountPath) } - return paths.Join(mountPath, apiPrefix, pathSuffix) + return path.Join(mountPath, apiPrefix, tp) } func getHeaderForMap(header string, data map[string]interface{}) string { @@ -205,65 +192,3 @@ func padEqualSigns(header string, totalLen int) string { return fmt.Sprintf("%s %s %s", strings.Repeat("=", equalSigns/2), header, strings.Repeat("=", equalSigns/2)) } - -// walkSecretsTree dfs-traverses the secrets tree rooted at the given path -// and calls the `visit` functor for each of the directory and leaf paths. -// Note: for kv-v2, a "metadata" path is expected and "metadata" paths will be -// returned in the visit functor. -func walkSecretsTree(ctx context.Context, client *api.Client, path string, visit func(path string, directory bool) error) error { - resp, err := client.Logical().ListWithContext(ctx, path) - if err != nil { - return fmt.Errorf("could not list %q path: %w", path, err) - } - - if resp == nil || resp.Data == nil { - return fmt.Errorf("no value found at %q: %w", path, err) - } - - keysRaw, ok := resp.Data["keys"] - if !ok { - return fmt.Errorf("unexpected list response at %q", path) - } - - keysRawSlice, ok := keysRaw.([]interface{}) - if !ok { - return fmt.Errorf("unexpected list response type %T at %q", keysRaw, path) - } - - keys := make([]string, 0, len(keysRawSlice)) - - for _, keyRaw := range keysRawSlice { - key, ok := keyRaw.(string) - if !ok { - return fmt.Errorf("unexpected key type %T at %q", keyRaw, path) - } - keys = append(keys, key) - } - - // sort the keys for a deterministic output - sort.Strings(keys) - - for _, key := range keys { - // the keys are relative to the current path: combine them - child := paths.Join(path, key) - - if strings.HasSuffix(key, "/") { - // visit the directory - if err := visit(child, true); err != nil { - return err - } - - // this is not a leaf node: we need to go deeper... 
- if err := walkSecretsTree(ctx, client, child, visit); err != nil { - return err - } - } else { - // this is a leaf node: add it to the list - if err := visit(child, false); err != nil { - return err - } - } - } - - return nil -} diff --git a/command/kv_helpers_test.go b/command/kv_helpers_test.go deleted file mode 100644 index ca5f7e8c5ac645..00000000000000 --- a/command/kv_helpers_test.go +++ /dev/null @@ -1,272 +0,0 @@ -package command - -import ( - "context" - "reflect" - "testing" - "time" - - "github.com/hashicorp/vault/api" -) - -// TestAddPrefixToKVPath tests the addPrefixToKVPath helper function -func TestAddPrefixToKVPath(t *testing.T) { - cases := map[string]struct { - path string - mountPath string - apiPrefix string - skipIfExists bool - expected string - }{ - "simple": { - path: "kv-v2/foo", - mountPath: "kv-v2/", - apiPrefix: "data", - skipIfExists: false, - expected: "kv-v2/data/foo", - }, - - "multi-part": { - path: "my/kv-v2/mount/path/foo/bar/baz", - mountPath: "my/kv-v2/mount/path", - apiPrefix: "metadata", - skipIfExists: false, - expected: "my/kv-v2/mount/path/metadata/foo/bar/baz", - }, - - "with-namespace": { - path: "my/kv-v2/mount/path/foo/bar/baz", - mountPath: "my/ns1/my/kv-v2/mount/path", - apiPrefix: "metadata", - skipIfExists: false, - expected: "my/kv-v2/mount/path/metadata/foo/bar/baz", - }, - - "skip-if-exists-true": { - path: "kv-v2/data/foo", - mountPath: "kv-v2/", - apiPrefix: "data", - skipIfExists: true, - expected: "kv-v2/data/foo", - }, - - "skip-if-exists-false": { - path: "kv-v2/data/foo", - mountPath: "kv-v2", - apiPrefix: "data", - skipIfExists: false, - expected: "kv-v2/data/data/foo", - }, - - "skip-if-exists-with-namespace": { - path: "my/kv-v2/mount/path/metadata/foo/bar/baz", - mountPath: "my/ns1/my/kv-v2/mount/path", - apiPrefix: "metadata", - skipIfExists: true, - expected: "my/kv-v2/mount/path/metadata/foo/bar/baz", - }, - } - - for name, tc := range cases { - name, tc := name, tc - t.Run(name, func(t *testing.T) { - t.Parallel() - - actual := addPrefixToKVPath( - tc.path, - tc.mountPath, - tc.apiPrefix, - tc.skipIfExists, - ) - - if tc.expected != actual { - t.Fatalf("unexpected output; want: %v, got: %v", tc.expected, actual) - } - }) - } -} - -// TestWalkSecretsTree tests the walkSecretsTree helper function -func TestWalkSecretsTree(t *testing.T) { - // test setup - client, closer := testVaultServer(t) - defer closer() - - // enable kv-v1 backend - if err := client.Sys().Mount("kv-v1/", &api.MountInput{ - Type: "kv-v1", - }); err != nil { - t.Fatal(err) - } - time.Sleep(time.Second) - - // enable kv-v2 backend - if err := client.Sys().Mount("kv-v2/", &api.MountInput{ - Type: "kv-v2", - }); err != nil { - t.Fatal(err) - } - time.Sleep(time.Second) - - ctx, cancelContextFunc := context.WithTimeout(context.Background(), 5*time.Second) - defer cancelContextFunc() - - // populate secrets - for _, path := range []string{ - "foo", - "app-1/foo", - "app-1/bar", - "app-1/nested/x/y/z", - "app-1/nested/x/y", - "app-1/nested/bar", - } { - if err := client.KVv1("kv-v1").Put(ctx, path, map[string]interface{}{ - "password": "Hashi123", - }); err != nil { - t.Fatal(err) - } - - if _, err := client.KVv2("kv-v2").Put(ctx, path, map[string]interface{}{ - "password": "Hashi123", - }); err != nil { - t.Fatal(err) - } - } - - type treePath struct { - path string - directory bool - } - - cases := map[string]struct { - path string - expected []treePath - expectedError bool - }{ - "kv-v1-simple": { - path: "kv-v1/app-1/nested/x/y", - expected: []treePath{ - 
{path: "kv-v1/app-1/nested/x/y/z", directory: false}, - }, - expectedError: false, - }, - - "kv-v2-simple": { - path: "kv-v2/metadata/app-1/nested/x/y", - expected: []treePath{ - {path: "kv-v2/metadata/app-1/nested/x/y/z", directory: false}, - }, - expectedError: false, - }, - - "kv-v1-nested": { - path: "kv-v1/app-1/nested/", - expected: []treePath{ - {path: "kv-v1/app-1/nested/bar", directory: false}, - {path: "kv-v1/app-1/nested/x", directory: true}, - {path: "kv-v1/app-1/nested/x/y", directory: false}, - {path: "kv-v1/app-1/nested/x/y", directory: true}, - {path: "kv-v1/app-1/nested/x/y/z", directory: false}, - }, - expectedError: false, - }, - - "kv-v2-nested": { - path: "kv-v2/metadata/app-1/nested/", - expected: []treePath{ - {path: "kv-v2/metadata/app-1/nested/bar", directory: false}, - {path: "kv-v2/metadata/app-1/nested/x", directory: true}, - {path: "kv-v2/metadata/app-1/nested/x/y", directory: false}, - {path: "kv-v2/metadata/app-1/nested/x/y", directory: true}, - {path: "kv-v2/metadata/app-1/nested/x/y/z", directory: false}, - }, - expectedError: false, - }, - - "kv-v1-all": { - path: "kv-v1", - expected: []treePath{ - {path: "kv-v1/app-1", directory: true}, - {path: "kv-v1/app-1/bar", directory: false}, - {path: "kv-v1/app-1/foo", directory: false}, - {path: "kv-v1/app-1/nested", directory: true}, - {path: "kv-v1/app-1/nested/bar", directory: false}, - {path: "kv-v1/app-1/nested/x", directory: true}, - {path: "kv-v1/app-1/nested/x/y", directory: false}, - {path: "kv-v1/app-1/nested/x/y", directory: true}, - {path: "kv-v1/app-1/nested/x/y/z", directory: false}, - {path: "kv-v1/foo", directory: false}, - }, - expectedError: false, - }, - - "kv-v2-all": { - path: "kv-v2/metadata", - expected: []treePath{ - {path: "kv-v2/metadata/app-1", directory: true}, - {path: "kv-v2/metadata/app-1/bar", directory: false}, - {path: "kv-v2/metadata/app-1/foo", directory: false}, - {path: "kv-v2/metadata/app-1/nested", directory: true}, - {path: "kv-v2/metadata/app-1/nested/bar", directory: false}, - {path: "kv-v2/metadata/app-1/nested/x", directory: true}, - {path: "kv-v2/metadata/app-1/nested/x/y", directory: false}, - {path: "kv-v2/metadata/app-1/nested/x/y", directory: true}, - {path: "kv-v2/metadata/app-1/nested/x/y/z", directory: false}, - {path: "kv-v2/metadata/foo", directory: false}, - }, - expectedError: false, - }, - - "kv-v1-not-found": { - path: "kv-v1/does/not/exist", - expected: nil, - expectedError: true, - }, - - "kv-v2-not-found": { - path: "kv-v2/metadata/does/not/exist", - expected: nil, - expectedError: true, - }, - - "kv-v1-not-listable-leaf-node": { - path: "kv-v1/foo", - expected: nil, - expectedError: true, - }, - - "kv-v2-not-listable-leaf-node": { - path: "kv-v2/metadata/foo", - expected: nil, - expectedError: true, - }, - } - - for name, tc := range cases { - t.Run(name, func(t *testing.T) { - var descendants []treePath - - err := walkSecretsTree(ctx, client, tc.path, func(path string, directory bool) error { - descendants = append(descendants, treePath{ - path: path, - directory: directory, - }) - return nil - }) - - if tc.expectedError { - if err == nil { - t.Fatal("an error was expected but the test succeeded") - } - } else { - if err != nil { - t.Fatal(err) - } - - if !reflect.DeepEqual(tc.expected, descendants) { - t.Fatalf("unexpected list output; want: %v, got: %v", tc.expected, descendants) - } - } - }) - } -} diff --git a/command/kv_list.go b/command/kv_list.go index 25ad4d2a7058b8..b6b665c6f55ed8 100644 --- a/command/kv_list.go +++ b/command/kv_list.go @@ 
-1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( "fmt" - "path" "strings" "github.com/mitchellh/cli" @@ -19,7 +15,6 @@ var ( type KVListCommand struct { *BaseCommand - flagMount string } func (c *KVListCommand) Synopsis() string { @@ -45,23 +40,7 @@ Usage: vault kv list [options] PATH } func (c *KVListCommand) Flags() *FlagSets { - set := c.flagSet(FlagSetHTTP | FlagSetOutputFormat) - - // Common Options - f := set.NewFlagSet("Common Options") - - f.StringVar(&StringVar{ - Name: "mount", - Target: &c.flagMount, - Default: "", // no default, because the handling of the next arg is determined by whether this flag has a value - Usage: `Specifies the path where the KV backend is mounted. If specified, - the next argument will be interpreted as the secret path. If this flag is - not specified, the next argument will be interpreted as the combined mount - path and secret path, with /data/ automatically appended between KV - v2 secrets.`, - }) - - return set + return c.flagSet(FlagSetHTTP | FlagSetOutputFormat) } func (c *KVListCommand) AutocompleteArgs() complete.Predictor { @@ -83,11 +62,8 @@ func (c *KVListCommand) Run(args []string) int { args = f.Args() switch { case len(args) < 1: - if c.flagMount == "" { - c.UI.Error(fmt.Sprintf("Not enough arguments (expected 1, got %d)", len(args))) - return 1 - } - args = []string{""} + c.UI.Error(fmt.Sprintf("Not enough arguments (expected 1, got %d)", len(args))) + return 1 case len(args) > 1: c.UI.Error(fmt.Sprintf("Too many arguments (expected 1, got %d)", len(args))) return 1 @@ -99,56 +75,31 @@ func (c *KVListCommand) Run(args []string) int { return 2 } - // If true, we're working with "-mount=secret foo" syntax. - // If false, we're using "secret/foo" syntax. - mountFlagSyntax := c.flagMount != "" - - var ( - mountPath string - partialPath string - v2 bool - ) - - // Parse the paths and grab the KV version - if mountFlagSyntax { - // In this case, this arg is the secret path (e.g. "foo"). - partialPath = sanitizePath(args[0]) - mountPath, v2, err = isKVv2(sanitizePath(c.flagMount), client) - if err != nil { - c.UI.Error(err.Error()) - return 2 - } + // Append trailing slash + path := args[0] + if !strings.HasSuffix(path, "/") { + path += "/" + } - if v2 { - partialPath = path.Join(mountPath, partialPath) - } - } else { - // In this case, this arg is a path-like combination of mountPath/secretPath. - // (e.g. 
"secret/foo") - partialPath = sanitizePath(args[0]) - mountPath, v2, err = isKVv2(partialPath, client) - if err != nil { - c.UI.Error(err.Error()) - return 2 - } + // Sanitize path + path = sanitizePath(path) + mountPath, v2, err := isKVv2(path, client) + if err != nil { + c.UI.Error(err.Error()) + return 2 } - // Add /metadata to v2 paths only - var fullPath string if v2 { - fullPath = addPrefixToKVPath(partialPath, mountPath, "metadata", false) - } else { - // v1 - if mountFlagSyntax { - fullPath = path.Join(mountPath, partialPath) - } else { - fullPath = partialPath + path = addPrefixToKVPath(path, mountPath, "metadata") + if err != nil { + c.UI.Error(err.Error()) + return 2 } } - secret, err := client.Logical().List(fullPath) + secret, err := client.Logical().List(path) if err != nil { - c.UI.Error(fmt.Sprintf("Error listing %s: %s", fullPath, err)) + c.UI.Error(fmt.Sprintf("Error listing %s: %s", path, err)) return 2 } @@ -166,12 +117,12 @@ func (c *KVListCommand) Run(args []string) int { } if secret == nil || secret.Data == nil { - c.UI.Error(fmt.Sprintf("No value found at %s", fullPath)) + c.UI.Error(fmt.Sprintf("No value found at %s", path)) return 2 } if !ok { - c.UI.Error(fmt.Sprintf("No entries found at %s", fullPath)) + c.UI.Error(fmt.Sprintf("No entries found at %s", path)) return 2 } diff --git a/command/kv_metadata.go b/command/kv_metadata.go index 4350311aff3264..c4ab37910555a0 100644 --- a/command/kv_metadata.go +++ b/command/kv_metadata.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/kv_metadata_delete.go b/command/kv_metadata_delete.go index 6217506f23b07d..cff16f21c6fe2f 100644 --- a/command/kv_metadata_delete.go +++ b/command/kv_metadata_delete.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -138,7 +135,7 @@ func (c *KVMetadataDeleteCommand) Run(args []string) int { return 1 } - fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata", false) + fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata") if secret, err := client.Logical().Delete(fullPath); err != nil { c.UI.Error(fmt.Sprintf("Error deleting %s: %s", fullPath, err)) if secret != nil { diff --git a/command/kv_metadata_get.go b/command/kv_metadata_get.go index 8d17210741f944..8920340752d5a6 100644 --- a/command/kv_metadata_get.go +++ b/command/kv_metadata_get.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -140,7 +137,7 @@ func (c *KVMetadataGetCommand) Run(args []string) int { return 1 } - fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata", false) + fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata") secret, err := client.Logical().Read(fullPath) if err != nil { c.UI.Error(fmt.Sprintf("Error reading %s: %s", fullPath, err)) diff --git a/command/kv_metadata_patch.go b/command/kv_metadata_patch.go index 60d1a3e8b20fef..11ffdb4bea262f 100644 --- a/command/kv_metadata_patch.go +++ b/command/kv_metadata_patch.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -211,7 +208,7 @@ func (c *KVMetadataPatchCommand) Run(args []string) int { return 1 } - fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata", false) + fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata") data := make(map[string]interface{}, 0) diff --git a/command/kv_metadata_patch_test.go b/command/kv_metadata_patch_test.go index 58f4f915233212..3b15c520294c15 100644 --- a/command/kv_metadata_patch_test.go +++ b/command/kv_metadata_patch_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/kv_metadata_put.go b/command/kv_metadata_put.go index 9ec43e0baec18f..5196b1c79a0a69 100644 --- a/command/kv_metadata_put.go +++ b/command/kv_metadata_put.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -199,7 +196,7 @@ func (c *KVMetadataPutCommand) Run(args []string) int { return 1 } - fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata", false) + fullPath := addPrefixToKVPath(partialPath, mountPath, "metadata") data := map[string]interface{}{} if c.flagMaxVersions >= 0 { diff --git a/command/kv_metadata_put_test.go b/command/kv_metadata_put_test.go index 008ded9fdb8d83..a952802cc4699a 100644 --- a/command/kv_metadata_put_test.go +++ b/command/kv_metadata_put_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/kv_patch.go b/command/kv_patch.go index 3f5080d4de70aa..8313ec33487c7e 100644 --- a/command/kv_patch.go +++ b/command/kv_patch.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -220,7 +217,7 @@ func (c *KVPatchCommand) Run(args []string) int { return 2 } - fullPath := addPrefixToKVPath(partialPath, mountPath, "data", false) + fullPath := addPrefixToKVPath(partialPath, mountPath, "data") if err != nil { c.UI.Error(err.Error()) return 2 diff --git a/command/kv_put.go b/command/kv_put.go index 77c0c6a1129e2f..5cc7b6fbc67b89 100644 --- a/command/kv_put.go +++ b/command/kv_put.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -181,7 +178,7 @@ func (c *KVPutCommand) Run(args []string) int { // Add /data to v2 paths only var fullPath string if v2 { - fullPath = addPrefixToKVPath(partialPath, mountPath, "data", false) + fullPath = addPrefixToKVPath(partialPath, mountPath, "data") data = map[string]interface{}{ "data": data, "options": map[string]interface{}{}, diff --git a/command/kv_rollback.go b/command/kv_rollback.go index c54d7bc2f6352c..0d782619a83209 100644 --- a/command/kv_rollback.go +++ b/command/kv_rollback.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -164,7 +161,7 @@ func (c *KVRollbackCommand) Run(args []string) int { return 2 } - fullPath := addPrefixToKVPath(partialPath, mountPath, "data", false) + fullPath := addPrefixToKVPath(partialPath, mountPath, "data") if err != nil { c.UI.Error(err.Error()) return 2 diff --git a/command/kv_test.go b/command/kv_test.go index 6564208ed3353b..830c9cc30770ea 100644 --- a/command/kv_test.go +++ b/command/kv_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -593,131 +590,6 @@ func TestKVGetCommand(t *testing.T) { }) } -func testKVListCommand(tb testing.TB) (*cli.MockUi, *KVListCommand) { - tb.Helper() - ui := cli.NewMockUi() - cmd := &KVListCommand{ - BaseCommand: &BaseCommand{ - UI: ui, - }, - } - - return ui, cmd -} - -// TestKVListCommand runs tests for `vault kv list` -func TestKVListCommand(t *testing.T) { - testCases := []struct { - name string - args []string - outStrings []string - code int - }{ - { - name: "default", - args: []string{"kv/my-prefix"}, - outStrings: []string{"secret-0", "secret-1", "secret-2"}, - code: 0, - }, - { - name: "not_enough_args", - args: []string{}, - outStrings: []string{"Not enough arguments"}, - code: 1, - }, - { - name: "v2_default_with_mount", - args: []string{"-mount", "kv", "my-prefix"}, - outStrings: []string{"secret-0", "secret-1", "secret-2"}, - code: 0, - }, - { - name: "v1_default_with_mount", - args: []string{"kv/my-prefix"}, - outStrings: []string{"secret-0", "secret-1", "secret-2"}, - code: 0, - }, - { - name: "v2_not_found", - args: []string{"kv/nope/not/once/never"}, - outStrings: []string{"No value found at kv/metadata/nope/not/once/never"}, - code: 2, - }, - { - name: "v1_mount_only", - args: []string{"kv"}, - outStrings: []string{"my-prefix"}, - code: 0, - }, - { - name: "v2_mount_only", - args: []string{"-mount", "kv"}, - outStrings: []string{"my-prefix"}, - code: 0, - }, - { - // this is behavior that should be tested - // `kv` here is an explicit mount - // `my-prefix` is not - // the current kv code will ignore `my-prefix` - name: "ignore_multi_part_mounts", - args: []string{"-mount", "kv/my-prefix"}, - outStrings: []string{"my-prefix"}, - code: 0, - }, - } - - t.Run("validations", func(t *testing.T) { - t.Parallel() - - for _, testCase := range testCases { - testCase := testCase - - t.Run(testCase.name, func(t *testing.T) { - t.Parallel() - - // test setup - client, closer := testVaultServer(t) - defer closer() - - // enable kv-v2 backend - if err := client.Sys().Mount("kv/", &api.MountInput{ - Type: "kv-v2", - }); err != nil { - t.Fatal(err) - } - time.Sleep(time.Second) - - ctx := context.Background() - for i := 0; i < 3; i++ { - path := fmt.Sprintf("my-prefix/secret-%d", i) - _, err := client.KVv2("kv/").Put(ctx, path, map[string]interface{}{ - "foo": "bar", - }) - if err != nil { - t.Fatal(err) - } - } - - ui, cmd := testKVListCommand(t) - cmd.client = client - - code := cmd.Run(testCase.args) - if code != testCase.code { - t.Errorf("expected %d to be %d", code, testCase.code) - } - - combined := ui.OutputWriter.String() + ui.ErrorWriter.String() - for _, str := range testCase.outStrings { - if !strings.Contains(combined, str) { - t.Errorf("expected %q to contain %q", combined, str) - } - } - }) - } - }) -} - func testKVMetadataGetCommand(tb testing.TB) (*cli.MockUi, *KVMetadataGetCommand) { tb.Helper() diff --git a/command/kv_undelete.go b/command/kv_undelete.go index 25de58835f0bd3..90ea608a73165b 100644 --- a/command/kv_undelete.go +++ b/command/kv_undelete.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -150,7 +147,7 @@ func (c *KVUndeleteCommand) Run(args []string) int { return 1 } - undeletePath := addPrefixToKVPath(partialPath, mountPath, "undelete", false) + undeletePath := addPrefixToKVPath(partialPath, mountPath, "undelete") data := map[string]interface{}{ "versions": kvParseVersionsFlags(c.flagVersions), } diff --git a/command/lease.go b/command/lease.go index 29ef79fc2d8cd0..76f6cc174c41bf 100644 --- a/command/lease.go +++ b/command/lease.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/lease_lookup.go b/command/lease_lookup.go index ef53ce52863532..c72c6a174f2ce0 100644 --- a/command/lease_lookup.go +++ b/command/lease_lookup.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/lease_lookup_test.go b/command/lease_lookup_test.go index 536c29c58eeebf..4de63200f5ce79 100644 --- a/command/lease_lookup_test.go +++ b/command/lease_lookup_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/lease_renew.go b/command/lease_renew.go index aad41d66b79526..13eb95ed009349 100644 --- a/command/lease_renew.go +++ b/command/lease_renew.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/lease_renew_test.go b/command/lease_renew_test.go index c24b812b911ac9..aa3b32d0d8b34b 100644 --- a/command/lease_renew_test.go +++ b/command/lease_renew_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/lease_revoke.go b/command/lease_revoke.go index 5efd5ecf198b56..1fc90eff7cb505 100644 --- a/command/lease_revoke.go +++ b/command/lease_revoke.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/lease_revoke_test.go b/command/lease_revoke_test.go index 261041e2331a8a..1aa58c38ac7655 100644 --- a/command/lease_revoke_test.go +++ b/command/lease_revoke_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/list.go b/command/list.go index 028f0d391c6c0a..9831b6633c8c2c 100644 --- a/command/list.go +++ b/command/list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -81,7 +78,13 @@ func (c *ListCommand) Run(args []string) int { return 2 } - path := sanitizePath(args[0]) + // Append trailing slash + path := args[0] + if !strings.HasSuffix(path, "/") { + path += "/" + } + + path = sanitizePath(path) secret, err := client.Logical().List(path) if err != nil { c.UI.Error(fmt.Sprintf("Error listing %s: %s", path, err)) diff --git a/command/list_test.go b/command/list_test.go index 070184bd91b4ad..b1b6680507f168 100644 --- a/command/list_test.go +++ b/command/list_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/log_flags.go b/command/log_flags.go index 5213d06377ef77..c00086740c0b77 100644 --- a/command/log_flags.go +++ b/command/log_flags.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/log_flags_test.go b/command/log_flags_test.go index 38bfa52e95a393..78ca51c4d19a90 100644 --- a/command/log_flags_test.go +++ b/command/log_flags_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/login.go b/command/login.go index c8bc23254c29ad..30352abbb4e596 100644 --- a/command/login.go +++ b/command/login.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/login_test.go b/command/login_test.go index 3d41d8e8814709..01ee159d825671 100644 --- a/command/login_test.go +++ b/command/login_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/main.go b/command/main.go index 13fbe2181822d8..3c6597decb6c2a 100644 --- a/command/main.go +++ b/command/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/monitor.go b/command/monitor.go index 7545b82da71a31..e6309258973f17 100644 --- a/command/monitor.go +++ b/command/monitor.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/monitor_test.go b/command/monitor_test.go index 0cc722c98d91f0..b790cfb0bc07b7 100644 --- a/command/monitor_test.go +++ b/command/monitor_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace.go b/command/namespace.go index 18bc6e92eb5521..702395753da8a6 100644 --- a/command/namespace.go +++ b/command/namespace.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_api_lock.go b/command/namespace_api_lock.go index 57b196992c9454..48fec344c74160 100644 --- a/command/namespace_api_lock.go +++ b/command/namespace_api_lock.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_api_unlock.go b/command/namespace_api_unlock.go index 77e829147afb26..38f4a764d4d529 100644 --- a/command/namespace_api_unlock.go +++ b/command/namespace_api_unlock.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_create.go b/command/namespace_create.go index 60df834d739a86..7d1f52fa8c9ba6 100644 --- a/command/namespace_create.go +++ b/command/namespace_create.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_delete.go b/command/namespace_delete.go index 5c79c35b1050ba..a5d18929368b8b 100644 --- a/command/namespace_delete.go +++ b/command/namespace_delete.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_list.go b/command/namespace_list.go index 6394daedf36c62..2be2a3874df5be 100644 --- a/command/namespace_list.go +++ b/command/namespace_list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_lookup.go b/command/namespace_lookup.go index ee18736633a542..98d710ea536e2e 100644 --- a/command/namespace_lookup.go +++ b/command/namespace_lookup.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/namespace_patch.go b/command/namespace_patch.go index 2a4a6dc699a80c..3ae6f6bc8b31b2 100644 --- a/command/namespace_patch.go +++ b/command/namespace_patch.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator.go b/command/operator.go index a79f7bff84060b..ad1bb439fc7d50 100644 --- a/command/operator.go +++ b/command/operator.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_diagnose.go b/command/operator_diagnose.go index 5abddb7c980d6b..bf78ea49c233b4 100644 --- a/command/operator_diagnose.go +++ b/command/operator_diagnose.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_diagnose_test.go b/command/operator_diagnose_test.go index 2c9a1a0363e3c7..99834b9d48bcd0 100644 --- a/command/operator_diagnose_test.go +++ b/command/operator_diagnose_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !race package command diff --git a/command/operator_generate_root.go b/command/operator_generate_root.go index 6665e8bdbf8cec..ece541683be1d4 100644 --- a/command/operator_generate_root.go +++ b/command/operator_generate_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_generate_root_test.go b/command/operator_generate_root_test.go index 1436ab2ddfae5a..b4489718efbeaf 100644 --- a/command/operator_generate_root_test.go +++ b/command/operator_generate_root_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !race package command diff --git a/command/operator_init.go b/command/operator_init.go index 080f5853def56d..3b0dfe3de2b5e0 100644 --- a/command/operator_init.go +++ b/command/operator_init.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_init_test.go b/command/operator_init_test.go index 06647d7278248a..ec02873587dfcb 100644 --- a/command/operator_init_test.go +++ b/command/operator_init_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !race package command diff --git a/command/operator_key_status.go b/command/operator_key_status.go index 412a00cd20e6cf..e015fb0e32f747 100644 --- a/command/operator_key_status.go +++ b/command/operator_key_status.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_key_status_test.go b/command/operator_key_status_test.go index 9f8fbb0f6dc490..01cb9136286b18 100644 --- a/command/operator_key_status_test.go +++ b/command/operator_key_status_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_members.go b/command/operator_members.go index 986313a3201b0d..d4bd1fbe4389f1 100644 --- a/command/operator_members.go +++ b/command/operator_members.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_migrate.go b/command/operator_migrate.go index 01e8bbb21322e5..5d3fda46ae6c60 100644 --- a/command/operator_migrate.go +++ b/command/operator_migrate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_migrate_test.go b/command/operator_migrate_test.go index 3d284309148464..d292626d020a5e 100644 --- a/command/operator_migrate_test.go +++ b/command/operator_migrate_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft.go b/command/operator_raft.go index 8720b78ce7cb7c..34107dbb595355 100644 --- a/command/operator_raft.go +++ b/command/operator_raft.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_autopilot_get_config.go b/command/operator_raft_autopilot_get_config.go index 11d7da87d89502..1462e354c55908 100644 --- a/command/operator_raft_autopilot_get_config.go +++ b/command/operator_raft_autopilot_get_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_autopilot_set_config.go b/command/operator_raft_autopilot_set_config.go index 0d9a5f33629517..4a839c5fae3ab3 100644 --- a/command/operator_raft_autopilot_set_config.go +++ b/command/operator_raft_autopilot_set_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_autopilot_state.go b/command/operator_raft_autopilot_state.go index 99f188aae78f06..8a530dc75e0f98 100644 --- a/command/operator_raft_autopilot_state.go +++ b/command/operator_raft_autopilot_state.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_join.go b/command/operator_raft_join.go index 57e14a827edcb9..466ab84142b417 100644 --- a/command/operator_raft_join.go +++ b/command/operator_raft_join.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_listpeers.go b/command/operator_raft_listpeers.go index 4e82c15da98edf..2c80112ec3faa2 100644 --- a/command/operator_raft_listpeers.go +++ b/command/operator_raft_listpeers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_remove_peer.go b/command/operator_raft_remove_peer.go index 84b516cf62947e..6f7e837474fac4 100644 --- a/command/operator_raft_remove_peer.go +++ b/command/operator_raft_remove_peer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_snapshot.go b/command/operator_raft_snapshot.go index 036c6ebae09f65..5e3b0428773630 100644 --- a/command/operator_raft_snapshot.go +++ b/command/operator_raft_snapshot.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_snapshot_restore.go b/command/operator_raft_snapshot_restore.go index 6067adca790490..3755d6cbfd0d93 100644 --- a/command/operator_raft_snapshot_restore.go +++ b/command/operator_raft_snapshot_restore.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_raft_snapshot_save.go b/command/operator_raft_snapshot_save.go index 2abbb0ad2234e1..496b0a7b52c485 100644 --- a/command/operator_raft_snapshot_save.go +++ b/command/operator_raft_snapshot_save.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_rekey.go b/command/operator_rekey.go index dde0e5800528a3..c15bff811a1d98 100644 --- a/command/operator_rekey.go +++ b/command/operator_rekey.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_rekey_test.go b/command/operator_rekey_test.go index 570cfe447e69e5..08285aa347177e 100644 --- a/command/operator_rekey_test.go +++ b/command/operator_rekey_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !race package command diff --git a/command/operator_seal.go b/command/operator_seal.go index f52665fd08b10c..369ec3215d66f4 100644 --- a/command/operator_seal.go +++ b/command/operator_seal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_seal_test.go b/command/operator_seal_test.go index 43e150f7e37fcb..86722d2e84dd7f 100644 --- a/command/operator_seal_test.go +++ b/command/operator_seal_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_step_down.go b/command/operator_step_down.go index bfa2d893f18fb0..dea2c97178da8d 100644 --- a/command/operator_step_down.go +++ b/command/operator_step_down.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_step_down_test.go b/command/operator_step_down_test.go index fbe07794d2971a..93117a856b665e 100644 --- a/command/operator_step_down_test.go +++ b/command/operator_step_down_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_unseal.go b/command/operator_unseal.go index 32d9140900a511..8cdd06d384087c 100644 --- a/command/operator_unseal.go +++ b/command/operator_unseal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_unseal_test.go b/command/operator_unseal_test.go index cb4d19603ba4f9..867b17a03c01e7 100644 --- a/command/operator_unseal_test.go +++ b/command/operator_unseal_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/operator_usage.go b/command/operator_usage.go index 8db538a298c545..1df42091ccc31d 100644 --- a/command/operator_usage.go +++ b/command/operator_usage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/patch.go b/command/patch.go index 9a4cd58862875e..8edb77ea86f7a1 100644 --- a/command/patch.go +++ b/command/patch.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/patch_test.go b/command/patch_test.go index 410e644656184b..b4bdd7a6243dc1 100644 --- a/command/patch_test.go +++ b/command/patch_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/path_help.go b/command/path_help.go index 41f3bcee66a6ef..1f540a5c6ab00e 100644 --- a/command/path_help.go +++ b/command/path_help.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/path_help_test.go b/command/path_help_test.go index eaf4fe8a9f2395..688bcf09cef3c6 100644 --- a/command/path_help_test.go +++ b/command/path_help_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pgp_test.go b/command/pgp_test.go index f37e488ed68655..b9f3ee2a91acd1 100644 --- a/command/pgp_test.go +++ b/command/pgp_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki.go b/command/pki.go index 89770fa4485168..8ae5eae4a64ee6 100644 --- a/command/pki.go +++ b/command/pki.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_health_check.go b/command/pki_health_check.go index 188c95c99e93aa..8c56a2c1f0ed26 100644 --- a/command/pki_health_check.go +++ b/command/pki_health_check.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -220,8 +217,6 @@ func (c *PKIHealthCheckCommand) Run(args []string) int { executor.AddCheck(healthcheck.NewEnableAutoTidyCheck()) executor.AddCheck(healthcheck.NewTidyLastRunCheck()) executor.AddCheck(healthcheck.NewTooManyCertsCheck()) - executor.AddCheck(healthcheck.NewEnableAcmeIssuance()) - executor.AddCheck(healthcheck.NewAllowAcmeHeaders()) if c.flagDefaultDisabled { executor.DefaultEnabled = false } diff --git a/command/pki_health_check_test.go b/command/pki_health_check_test.go index 93d231593ee787..af3cf337a4688a 100644 --- a/command/pki_health_check_test.go +++ b/command/pki_health_check_test.go @@ -1,13 +1,9 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( "bytes" "encoding/json" "fmt" - "net/url" "strings" "testing" "time" @@ -31,7 +27,7 @@ func TestPKIHC_AllGood(t *testing.T) { AuditNonHMACRequestKeys: healthcheck.VisibleReqParams, AuditNonHMACResponseKeys: healthcheck.VisibleRespParams, PassthroughRequestHeaders: []string{"If-Modified-Since"}, - AllowedResponseHeaders: []string{"Last-Modified", "Replay-Nonce", "Link", "Location"}, + AllowedResponseHeaders: []string{"Last-Modified"}, MaxLeaseTTL: "36500d", }, }); err != nil { @@ -70,21 +66,6 @@ func TestPKIHC_AllGood(t *testing.T) { t.Fatalf("failed to run tidy: %v", err) } - path, err := url.Parse(client.Address()) - require.NoError(t, err, "failed parsing client address") - - if _, err := client.Logical().Write("pki/config/cluster", map[string]interface{}{ - "path": path.JoinPath("/v1/", "pki/").String(), - }); err != nil { - t.Fatalf("failed to update local cluster: %v", err) - } - - if _, err := client.Logical().Write("pki/config/acme", map[string]interface{}{ - "enabled": "true", - }); err != nil { - t.Fatalf("failed to update acme config: %v", err) - } - _, _, results := execPKIHC(t, client, true) validateExpectedPKIHC(t, expectedAllGood, results) @@ -361,11 +342,6 @@ var expectedAllGood = map[string][]map[string]interface{}{ "status": "ok", }, }, - "allow_acme_headers": { - { - "status": "ok", - }, - }, "allow_if_modified_since": { { "status": "ok", @@ -376,11 +352,6 @@ var expectedAllGood = map[string][]map[string]interface{}{ "status": "ok", }, }, - "enable_acme_issuance": { - { - "status": "ok", - }, - }, "enable_auto_tidy": { { "status": "ok", @@ -432,11 +403,6 @@ var expectedAllBad = map[string][]map[string]interface{}{ "status": "critical", }, }, - "allow_acme_headers": { - { - "status": "not_applicable", - }, - }, "allow_if_modified_since": { { "status": "informational", @@ -534,11 +500,6 @@ var expectedAllBad = map[string][]map[string]interface{}{ "status": "informational", }, }, - "enable_acme_issuance": { - { - "status": "not_applicable", - }, - }, "enable_auto_tidy": { { "status": "informational", @@ -590,18 +551,8 @@ var expectedEmptyWithIssuer = map[string][]map[string]interface{}{ "status": "ok", }, }, - "allow_acme_headers": { - { - "status": "not_applicable", - }, - }, "allow_if_modified_since": nil, "audit_visibility": nil, - "enable_acme_issuance": { - { - "status": "not_applicable", - }, - }, "enable_auto_tidy": { { "status": "informational", @@ -644,18 +595,8 @@ var expectedNoPerm = map[string][]map[string]interface{}{ "status": "critical", }, }, - "allow_acme_headers": { - { - "status": "insufficient_permissions", - }, - }, "allow_if_modified_since": nil, "audit_visibility": nil, - "enable_acme_issuance": { - { - "status": "insufficient_permissions", - }, - }, "enable_auto_tidy": { { "status": "insufficient_permissions", diff --git a/command/pki_issue_intermediate.go b/command/pki_issue_intermediate.go index fe16fdaaca91a0..7fd881aac1bb36 100644 --- a/command/pki_issue_intermediate.go +++ b/command/pki_issue_intermediate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_issue_intermediate_test.go b/command/pki_issue_intermediate_test.go index 58f9e6271105d5..3aa491a1bd1089 100644 --- a/command/pki_issue_intermediate_test.go +++ b/command/pki_issue_intermediate_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_list_intermediate.go b/command/pki_list_intermediate.go index c62c580806919a..2de5a8e8ab7634 100644 --- a/command/pki_list_intermediate.go +++ b/command/pki_list_intermediate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_list_intermediate_test.go b/command/pki_list_intermediate_test.go index d494c193387acd..87b38ec59e9d78 100644 --- a/command/pki_list_intermediate_test.go +++ b/command/pki_list_intermediate_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_reissue_intermediate.go b/command/pki_reissue_intermediate.go index 852c0c0f1d24b6..4c6659cf3770af 100644 --- a/command/pki_reissue_intermediate.go +++ b/command/pki_reissue_intermediate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_reissue_intermediate_test.go b/command/pki_reissue_intermediate_test.go index e485f04d322b2c..928449344cb67a 100644 --- a/command/pki_reissue_intermediate_test.go +++ b/command/pki_reissue_intermediate_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_verify_sign.go b/command/pki_verify_sign.go index b5a864f12c9ffa..cee6ae00d1a059 100644 --- a/command/pki_verify_sign.go +++ b/command/pki_verify_sign.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/pki_verify_sign_test.go b/command/pki_verify_sign_test.go index 3f8986a5b6b169..5ac09cc8033811 100644 --- a/command/pki_verify_sign_test.go +++ b/command/pki_verify_sign_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin.go b/command/plugin.go index ca55a4bf73172e..cf0a5009f62606 100644 --- a/command/plugin.go +++ b/command/plugin.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_deregister.go b/command/plugin_deregister.go index 86b329f1063d10..fc078ffd437fe3 100644 --- a/command/plugin_deregister.go +++ b/command/plugin_deregister.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_deregister_test.go b/command/plugin_deregister_test.go index b05644e46d4fe9..f74f3f1f3b291c 100644 --- a/command/plugin_deregister_test.go +++ b/command/plugin_deregister_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_info.go b/command/plugin_info.go index 1fa9555ba9c27d..bcd13fc5f71f97 100644 --- a/command/plugin_info.go +++ b/command/plugin_info.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_info_test.go b/command/plugin_info_test.go index 921014cec538a5..c48977c4af933b 100644 --- a/command/plugin_info_test.go +++ b/command/plugin_info_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_list.go b/command/plugin_list.go index f1b0e5ebfa04bc..93d06d78864f72 100644 --- a/command/plugin_list.go +++ b/command/plugin_list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_list_test.go b/command/plugin_list_test.go index edae76558f374c..8e4bbbff83e672 100644 --- a/command/plugin_list_test.go +++ b/command/plugin_list_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_register.go b/command/plugin_register.go index e9d2e5b7c67739..6e2e2df487cc69 100644 --- a/command/plugin_register.go +++ b/command/plugin_register.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_register_test.go b/command/plugin_register_test.go index eccd5c1f001458..a28618eec93a99 100644 --- a/command/plugin_register_test.go +++ b/command/plugin_register_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_reload.go b/command/plugin_reload.go index 2e95fdd1430db8..ae3c663869fe59 100644 --- a/command/plugin_reload.go +++ b/command/plugin_reload.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_reload_status.go b/command/plugin_reload_status.go index e527a07d1bfba9..319d539c15468f 100644 --- a/command/plugin_reload_status.go +++ b/command/plugin_reload_status.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_reload_test.go b/command/plugin_reload_test.go index 646fda92450577..5ff39723e20405 100644 --- a/command/plugin_reload_test.go +++ b/command/plugin_reload_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/plugin_test.go b/command/plugin_test.go index 08c350cbd08827..ee0c6e53d97c6e 100644 --- a/command/plugin_test.go +++ b/command/plugin_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy.go b/command/policy.go index 289aae134a4ff2..59ffdf0bfb3d4f 100644 --- a/command/policy.go +++ b/command/policy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_delete.go b/command/policy_delete.go index 199fb74a966365..76fa9a21d0f860 100644 --- a/command/policy_delete.go +++ b/command/policy_delete.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_delete_test.go b/command/policy_delete_test.go index 008cd59766da36..2c822de9d4fdd0 100644 --- a/command/policy_delete_test.go +++ b/command/policy_delete_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_fmt.go b/command/policy_fmt.go index 75a91791327c57..7912c10643d9c4 100644 --- a/command/policy_fmt.go +++ b/command/policy_fmt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_fmt_test.go b/command/policy_fmt_test.go index 89ed5215b6dad0..2ae92ff6947e0b 100644 --- a/command/policy_fmt_test.go +++ b/command/policy_fmt_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_list.go b/command/policy_list.go index 7b5bfc12c98e71..53e85df0fdb9b8 100644 --- a/command/policy_list.go +++ b/command/policy_list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_list_test.go b/command/policy_list_test.go index 19766978c73307..70defe54ead7a0 100644 --- a/command/policy_list_test.go +++ b/command/policy_list_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_read.go b/command/policy_read.go index 4f226444bea806..31777c5d5ae9e4 100644 --- a/command/policy_read.go +++ b/command/policy_read.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_read_test.go b/command/policy_read_test.go index f091749176b502..8cd7c066b8ce21 100644 --- a/command/policy_read_test.go +++ b/command/policy_read_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_write.go b/command/policy_write.go index 81ff2b3e1993cd..538414bc50fa89 100644 --- a/command/policy_write.go +++ b/command/policy_write.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/policy_write_test.go b/command/policy_write_test.go index 8294ef1934dbb8..c8db7dc9ddc232 100644 --- a/command/policy_write_test.go +++ b/command/policy_write_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/print.go b/command/print.go index 19ac0a674dbc3e..dace6ac951d68e 100644 --- a/command/print.go +++ b/command/print.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/print_token.go b/command/print_token.go index 862af23e0b2d31..efe5aeedd3efd2 100644 --- a/command/print_token.go +++ b/command/print_token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/proxy.go b/command/proxy.go deleted file mode 100644 index 0eee175af9703d..00000000000000 --- a/command/proxy.go +++ /dev/null @@ -1,1116 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package command - -import ( - "context" - "crypto/tls" - "flag" - "fmt" - "io" - "net" - "net/http" - "os" - "sort" - "strings" - "sync" - "time" - - systemd "github.com/coreos/go-systemd/daemon" - ctconfig "github.com/hashicorp/consul-template/config" - log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/go-secure-stdlib/gatedwriter" - "github.com/hashicorp/go-secure-stdlib/parseutil" - "github.com/hashicorp/go-secure-stdlib/reloadutil" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/command/agentproxyshared" - "github.com/hashicorp/vault/command/agentproxyshared/auth" - cache "github.com/hashicorp/vault/command/agentproxyshared/cache" - "github.com/hashicorp/vault/command/agentproxyshared/sink" - "github.com/hashicorp/vault/command/agentproxyshared/sink/file" - "github.com/hashicorp/vault/command/agentproxyshared/sink/inmem" - "github.com/hashicorp/vault/command/agentproxyshared/winsvc" - proxyConfig "github.com/hashicorp/vault/command/proxy/config" - "github.com/hashicorp/vault/helper/logging" - "github.com/hashicorp/vault/helper/metricsutil" - "github.com/hashicorp/vault/helper/useragent" - "github.com/hashicorp/vault/internalshared/configutil" - "github.com/hashicorp/vault/internalshared/listenerutil" - "github.com/hashicorp/vault/sdk/helper/consts" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/version" - "github.com/kr/pretty" - "github.com/mitchellh/cli" - "github.com/oklog/run" - "github.com/posener/complete" - "golang.org/x/text/cases" - "golang.org/x/text/language" - "google.golang.org/grpc/test/bufconn" -) - -var ( - _ cli.Command = (*ProxyCommand)(nil) - _ cli.CommandAutocomplete = (*ProxyCommand)(nil) -) - -const ( - // flagNameProxyExitAfterAuth is used as a Proxy specific flag to indicate - // that proxy should exit after a single successful auth - flagNameProxyExitAfterAuth = "exit-after-auth" -) - -type ProxyCommand struct { - *BaseCommand - logFlags logFlags - - config *proxyConfig.Config - - ShutdownCh chan struct{} - SighupCh chan struct{} - - tlsReloadFuncsLock sync.RWMutex - tlsReloadFuncs []reloadutil.ReloadFunc - - logWriter io.Writer - logGate *gatedwriter.Writer - logger log.Logger - - // Telemetry object - metricsHelper *metricsutil.MetricsHelper - - cleanupGuard sync.Once - - startedCh chan struct{} // for tests - reloadedCh chan struct{} // for tests - - flagConfigs []string - flagExitAfterAuth bool - flagTestVerifyOnly bool -} - -func (c *ProxyCommand) Synopsis() string { - return "Start a Vault Proxy" -} - -func (c *ProxyCommand) Help() string { - helpText := ` -Usage: vault proxy [options] - - This command starts a Vault Proxy that can perform automatic authentication - in certain environments. - - Start a proxy with a configuration file: - - $ vault proxy -config=/etc/vault/config.hcl - - For a full list of examples, please see the documentation. - -` + c.Flags().Help() - return strings.TrimSpace(helpText) -} - -func (c *ProxyCommand) Flags() *FlagSets { - set := c.flagSet(FlagSetHTTP) - - f := set.NewFlagSet("Command Options") - - // Augment with the log flags - f.addLogFlags(&c.logFlags) - - f.StringSliceVar(&StringSliceVar{ - Name: "config", - Target: &c.flagConfigs, - Completion: complete.PredictOr( - complete.PredictFiles("*.hcl"), - complete.PredictFiles("*.json"), - ), - Usage: "Path to a configuration file. 
This configuration file should " + - "contain only proxy directives.", - }) - - f.BoolVar(&BoolVar{ - Name: flagNameProxyExitAfterAuth, - Target: &c.flagExitAfterAuth, - Default: false, - Usage: "If set to true, the proxy will exit with code 0 after a single " + - "successful auth, where success means that a token was retrieved and " + - "all sinks successfully wrote it", - }) - - // Internal-only flags to follow. - // - // Why hello there little source code reader! Welcome to the Vault source - // code. The remaining options are intentionally undocumented and come with - // no warranty or backwards-compatibility promise. Do not use these flags - // in production. Do not build automation using these flags. Unless you are - // developing against Vault, you should not need any of these flags. - f.BoolVar(&BoolVar{ - Name: "test-verify-only", - Target: &c.flagTestVerifyOnly, - Default: false, - Hidden: true, - }) - - // End internal-only flags. - - return set -} - -func (c *ProxyCommand) AutocompleteArgs() complete.Predictor { - return complete.PredictNothing -} - -func (c *ProxyCommand) AutocompleteFlags() complete.Flags { - return c.Flags().Completions() -} - -func (c *ProxyCommand) Run(args []string) int { - f := c.Flags() - - if err := f.Parse(args); err != nil { - c.UI.Error(err.Error()) - return 1 - } - - // Create a logger. We wrap it in a gated writer so that it doesn't - // start logging too early. - c.logGate = gatedwriter.NewWriter(os.Stderr) - c.logWriter = c.logGate - - if c.logFlags.flagCombineLogs { - c.logWriter = os.Stdout - } - - // Validation - if len(c.flagConfigs) < 1 { - c.UI.Error("Must specify exactly at least one config path using -config") - return 1 - } - - config, err := c.loadConfig(c.flagConfigs) - if err != nil { - c.outputErrors(err) - return 1 - } - - if config.AutoAuth == nil { - c.UI.Info("No auto_auth block found in config, the automatic authentication feature will not be started") - } - - c.applyConfigOverrides(f, config) // This only needs to happen on start-up to aggregate config from flags and env vars - c.config = config - - l, err := c.newLogger() - if err != nil { - c.outputErrors(err) - return 1 - } - c.logger = l - - infoKeys := make([]string, 0, 10) - info := make(map[string]string) - info["log level"] = config.LogLevel - infoKeys = append(infoKeys, "log level") - - infoKeys = append(infoKeys, "version") - verInfo := version.GetVersion() - info["version"] = verInfo.FullVersionNumber(false) - if verInfo.Revision != "" { - info["version sha"] = strings.Trim(verInfo.Revision, "'") - infoKeys = append(infoKeys, "version sha") - } - infoKeys = append(infoKeys, "cgo") - info["cgo"] = "disabled" - if version.CgoEnabled { - info["cgo"] = "enabled" - } - - // Tests might not want to start a vault server and just want to verify - // the configuration. - if c.flagTestVerifyOnly { - if os.Getenv("VAULT_TEST_VERIFY_ONLY_DUMP_CONFIG") != "" { - c.UI.Output(fmt.Sprintf( - "\nConfiguration:\n%s\n", - pretty.Sprint(*c.config))) - } - return 0 - } - - // Ignore any setting of Agent/Proxy's address. This client is used by the Proxy - // to reach out to Vault. This should never loop back to the proxy. - c.flagAgentProxyAddress = "" - client, err := c.Client() - if err != nil { - c.UI.Error(fmt.Sprintf( - "Error fetching client: %v", - err)) - return 1 - } - - serverHealth, err := client.Sys().Health() - // We don't have any special behaviour if the error != nil, as this - // is not worth stopping the Proxy process over. 
- if err == nil { - // Note that we don't exit if the versions don't match, as this is a valid - // configuration, but we should still let the user know. - serverVersion := serverHealth.Version - proxyVersion := version.GetVersion().VersionNumber() - if serverVersion != proxyVersion { - c.UI.Info("==> Note: Vault Proxy version does not match Vault server version. " + - fmt.Sprintf("Vault Proxy version: %s, Vault server version: %s", proxyVersion, serverVersion)) - } - } - - // telemetry configuration - inmemMetrics, _, prometheusEnabled, err := configutil.SetupTelemetry(&configutil.SetupTelemetryOpts{ - Config: config.Telemetry, - Ui: c.UI, - ServiceName: "vault", - DisplayName: "Vault", - UserAgent: useragent.ProxyString(), - ClusterName: config.ClusterName, - }) - if err != nil { - c.UI.Error(fmt.Sprintf("Error initializing telemetry: %s", err)) - return 1 - } - c.metricsHelper = metricsutil.NewMetricsHelper(inmemMetrics, prometheusEnabled) - - var method auth.AuthMethod - var sinks []*sink.SinkConfig - if config.AutoAuth != nil { - if client.Headers().Get(consts.NamespaceHeaderName) == "" && config.AutoAuth.Method.Namespace != "" { - client.SetNamespace(config.AutoAuth.Method.Namespace) - } - - sinkClient, err := client.CloneWithHeaders() - if err != nil { - c.UI.Error(fmt.Sprintf("Error cloning client for file sink: %v", err)) - return 1 - } - - if config.DisableIdleConnsAutoAuth { - sinkClient.SetMaxIdleConnections(-1) - } - - if config.DisableKeepAlivesAutoAuth { - sinkClient.SetDisableKeepAlives(true) - } - - for _, sc := range config.AutoAuth.Sinks { - switch sc.Type { - case "file": - config := &sink.SinkConfig{ - Logger: c.logger.Named("sink.file"), - Config: sc.Config, - Client: sinkClient, - WrapTTL: sc.WrapTTL, - DHType: sc.DHType, - DeriveKey: sc.DeriveKey, - DHPath: sc.DHPath, - AAD: sc.AAD, - } - s, err := file.NewFileSink(config) - if err != nil { - c.UI.Error(fmt.Errorf("error creating file sink: %w", err).Error()) - return 1 - } - config.Sink = s - sinks = append(sinks, config) - default: - c.UI.Error(fmt.Sprintf("Unknown sink type %q", sc.Type)) - return 1 - } - } - - authConfig := &auth.AuthConfig{ - Logger: c.logger.Named(fmt.Sprintf("auth.%s", config.AutoAuth.Method.Type)), - MountPath: config.AutoAuth.Method.MountPath, - Config: config.AutoAuth.Method.Config, - } - method, err = agentproxyshared.GetAutoAuthMethodFromConfig(config.AutoAuth.Method.Type, authConfig, config.Vault.Address) - if err != nil { - c.UI.Error(fmt.Sprintf("Error creating %s auth method: %v", config.AutoAuth.Method.Type, err)) - return 1 - } - } - - // We do this after auto-auth has been configured, because we don't want to - // confuse the issue of retries for auth failures which have their own - // config and are handled a bit differently. 
- if os.Getenv(api.EnvVaultMaxRetries) == "" { - client.SetMaxRetries(ctconfig.DefaultRetryAttempts) - if config.Vault != nil { - if config.Vault.Retry != nil { - client.SetMaxRetries(config.Vault.Retry.NumRetries) - } - } - } - - enforceConsistency := cache.EnforceConsistencyNever - whenInconsistent := cache.WhenInconsistentFail - if config.APIProxy != nil { - switch config.APIProxy.EnforceConsistency { - case "always": - enforceConsistency = cache.EnforceConsistencyAlways - case "never", "": - default: - c.UI.Error(fmt.Sprintf("Unknown api_proxy setting for enforce_consistency: %q", config.APIProxy.EnforceConsistency)) - return 1 - } - - switch config.APIProxy.WhenInconsistent { - case "retry": - whenInconsistent = cache.WhenInconsistentRetry - case "forward": - whenInconsistent = cache.WhenInconsistentForward - case "fail", "": - default: - c.UI.Error(fmt.Sprintf("Unknown api_proxy setting for when_inconsistent: %q", config.APIProxy.WhenInconsistent)) - return 1 - } - } - - // Warn if cache _and_ cert auto-auth is enabled but certificates were not - // provided in the auto_auth.method["cert"].config stanza. - if config.Cache != nil && (config.AutoAuth != nil && config.AutoAuth.Method != nil && config.AutoAuth.Method.Type == "cert") { - _, okCertFile := config.AutoAuth.Method.Config["client_cert"] - _, okCertKey := config.AutoAuth.Method.Config["client_key"] - - // If neither of these exists in the cert stanza, proxy will use the - // certs from the vault stanza. - if !okCertFile && !okCertKey { - c.UI.Warn(wrapAtLength("WARNING! Cache is enabled and using the same certificates " + - "from the 'cert' auto-auth method specified in the 'vault' stanza. Consider " + - "specifying certificate information in the 'cert' auto-auth's config stanza.")) - } - - } - - // Output the header that the proxy has started - if !c.logFlags.flagCombineLogs { - c.UI.Output("==> Vault Proxy started! Log data will stream in below:\n") - } - - var leaseCache *cache.LeaseCache - var previousToken string - - proxyClient, err := client.CloneWithHeaders() - if err != nil { - c.UI.Error(fmt.Sprintf("Error cloning client for proxying: %v", err)) - return 1 - } - - if config.DisableIdleConnsAPIProxy { - proxyClient.SetMaxIdleConnections(-1) - } - - if config.DisableKeepAlivesAPIProxy { - proxyClient.SetDisableKeepAlives(true) - } - - apiProxyLogger := c.logger.Named("apiproxy") - - // The API proxy to be used, if listeners are configured - apiProxy, err := cache.NewAPIProxy(&cache.APIProxyConfig{ - Client: proxyClient, - Logger: apiProxyLogger, - EnforceConsistency: enforceConsistency, - WhenInconsistentAction: whenInconsistent, - UserAgentStringFunction: useragent.ProxyStringWithProxiedUserAgent, - UserAgentString: useragent.ProxyAPIProxyString(), - }) - if err != nil { - c.UI.Error(fmt.Sprintf("Error creating API proxy: %v", err)) - return 1 - } - - // ctx and cancelFunc are passed to the AuthHandler, SinkServer, - // and other subsystems, so that they can listen for ctx.Done() to - // fire and shut down accordingly. - ctx, cancelFunc := context.WithCancel(context.Background()) - defer cancelFunc() - - // Parse proxy cache configurations - if config.Cache != nil { - cacheLogger := c.logger.Named("cache") - - // Create the lease cache proxier and set its underlying proxier to - // the API proxier. 
- leaseCache, err = cache.NewLeaseCache(&cache.LeaseCacheConfig{ - Client: proxyClient, - BaseContext: ctx, - Proxier: apiProxy, - Logger: cacheLogger.Named("leasecache"), - }) - if err != nil { - c.UI.Error(fmt.Sprintf("Error creating lease cache: %v", err)) - return 1 - } - - // Configure persistent storage and add to LeaseCache - if config.Cache.Persist != nil { - deferFunc, oldToken, err := agentproxyshared.AddPersistentStorageToLeaseCache(ctx, leaseCache, config.Cache.Persist, cacheLogger) - if err != nil { - c.UI.Error(fmt.Sprintf("Error creating persistent cache: %v", err)) - return 1 - } - previousToken = oldToken - if deferFunc != nil { - defer deferFunc() - } - } - } - - var listeners []net.Listener - - // Ensure we've added all the reload funcs for TLS before anyone triggers a reload. - c.tlsReloadFuncsLock.Lock() - - for i, lnConfig := range config.Listeners { - var ln net.Listener - var tlsCfg *tls.Config - - if lnConfig.Type == listenerutil.BufConnType { - inProcListener := bufconn.Listen(1024 * 1024) - if config.Cache != nil { - config.Cache.InProcDialer = listenerutil.NewBufConnWrapper(inProcListener) - } - ln = inProcListener - } else { - lnBundle, err := cache.StartListener(lnConfig) - if err != nil { - c.UI.Error(fmt.Sprintf("Error starting listener: %v", err)) - return 1 - } - - tlsCfg = lnBundle.TLSConfig - ln = lnBundle.Listener - - // Track the reload func, so we can reload later if needed. - c.tlsReloadFuncs = append(c.tlsReloadFuncs, lnBundle.TLSReloadFunc) - } - - listeners = append(listeners, ln) - - proxyVaultToken := true - var inmemSink sink.Sink - if config.APIProxy != nil { - if config.APIProxy.UseAutoAuthToken { - apiProxyLogger.Debug("auto-auth token is allowed to be used; configuring inmem sink") - inmemSink, err = inmem.New(&sink.SinkConfig{ - Logger: apiProxyLogger, - }, leaseCache) - if err != nil { - c.UI.Error(fmt.Sprintf("Error creating inmem sink for cache: %v", err)) - return 1 - } - sinks = append(sinks, &sink.SinkConfig{ - Logger: apiProxyLogger, - Sink: inmemSink, - }) - } - proxyVaultToken = !config.APIProxy.ForceAutoAuthToken - } - - var muxHandler http.Handler - if leaseCache != nil { - muxHandler = cache.ProxyHandler(ctx, apiProxyLogger, leaseCache, inmemSink, proxyVaultToken) - } else { - muxHandler = cache.ProxyHandler(ctx, apiProxyLogger, apiProxy, inmemSink, proxyVaultToken) - } - - // Parse 'require_request_header' listener config option, and wrap - // the request handler if necessary - if lnConfig.RequireRequestHeader && ("metrics_only" != lnConfig.Role) { - muxHandler = verifyRequestHeader(muxHandler) - } - - // Create a muxer and add paths relevant for the lease cache layer - mux := http.NewServeMux() - quitEnabled := lnConfig.ProxyAPI != nil && lnConfig.ProxyAPI.EnableQuit - - mux.Handle(consts.ProxyPathMetrics, c.handleMetrics()) - if "metrics_only" != lnConfig.Role { - mux.Handle(consts.ProxyPathCacheClear, leaseCache.HandleCacheClear(ctx)) - mux.Handle(consts.ProxyPathQuit, c.handleQuit(quitEnabled)) - mux.Handle("/", muxHandler) - } - - scheme := "https://" - if tlsCfg == nil { - scheme = "http://" - } - if ln.Addr().Network() == "unix" { - scheme = "unix://" - } - - infoKey := fmt.Sprintf("api address %d", i+1) - info[infoKey] = scheme + ln.Addr().String() - infoKeys = append(infoKeys, infoKey) - - server := &http.Server{ - Addr: ln.Addr().String(), - TLSConfig: tlsCfg, - Handler: mux, - ReadHeaderTimeout: 10 * time.Second, - ReadTimeout: 30 * time.Second, - IdleTimeout: 5 * time.Minute, - ErrorLog: 
apiProxyLogger.StandardLogger(nil), - } - - go server.Serve(ln) - } - - c.tlsReloadFuncsLock.Unlock() - - // Ensure that listeners are closed at all the exits - listenerCloseFunc := func() { - for _, ln := range listeners { - ln.Close() - } - } - defer c.cleanupGuard.Do(listenerCloseFunc) - - // Inform any tests that the server is ready - if c.startedCh != nil { - close(c.startedCh) - } - - var g run.Group - - g.Add(func() error { - for { - select { - case <-c.SighupCh: - c.UI.Output("==> Vault Proxy config reload triggered") - err := c.reloadConfig(c.flagConfigs) - if err != nil { - c.outputErrors(err) - } - // Send the 'reloaded' message on the relevant channel - select { - case c.reloadedCh <- struct{}{}: - default: - } - case <-ctx.Done(): - return nil - } - } - }, func(error) { - cancelFunc() - }) - - // This run group watches for signal termination - g.Add(func() error { - for { - select { - case <-c.ShutdownCh: - c.UI.Output("==> Vault Proxy shutdown triggered") - // Notify systemd that the server is shutting down - // Let the lease cache know this is a shutdown; no need to evict everything - if leaseCache != nil { - leaseCache.SetShuttingDown(true) - } - return nil - case <-ctx.Done(): - return nil - case <-winsvc.ShutdownChannel(): - return nil - } - } - }, func(error) {}) - - // Start auto-auth and sink servers - if method != nil { - // Auth Handler is going to set its own retry values, so we want to - // work on a copy of the client to not affect other subsystems. - ahClient, err := c.client.CloneWithHeaders() - if err != nil { - c.UI.Error(fmt.Sprintf("Error cloning client for auth handler: %v", err)) - return 1 - } - - if config.DisableIdleConnsAutoAuth { - ahClient.SetMaxIdleConnections(-1) - } - - if config.DisableKeepAlivesAutoAuth { - ahClient.SetDisableKeepAlives(true) - } - - ah := auth.NewAuthHandler(&auth.AuthHandlerConfig{ - Logger: c.logger.Named("auth.handler"), - Client: ahClient, - WrapTTL: config.AutoAuth.Method.WrapTTL, - MinBackoff: config.AutoAuth.Method.MinBackoff, - MaxBackoff: config.AutoAuth.Method.MaxBackoff, - EnableReauthOnNewCredentials: config.AutoAuth.EnableReauthOnNewCredentials, - Token: previousToken, - ExitOnError: config.AutoAuth.Method.ExitOnError, - UserAgent: useragent.ProxyAutoAuthString(), - MetricsSignifier: "proxy", - }) - - ss := sink.NewSinkServer(&sink.SinkServerConfig{ - Logger: c.logger.Named("sink.server"), - Client: ahClient, - ExitAfterAuth: config.ExitAfterAuth, - }) - - g.Add(func() error { - return ah.Run(ctx, method) - }, func(error) { - // Let the lease cache know this is a shutdown; no need to evict - // everything - if leaseCache != nil { - leaseCache.SetShuttingDown(true) - } - cancelFunc() - }) - - g.Add(func() error { - err := ss.Run(ctx, ah.OutputCh, sinks) - c.logger.Info("sinks finished, exiting") - - // Start goroutine to drain from ah.OutputCh from this point onward - // to prevent ah.Run from being blocked. 
- go func() { - for { - select { - case <-ctx.Done(): - return - case <-ah.OutputCh: - } - } - }() - - return err - }, func(error) { - // Let the lease cache know this is a shutdown; no need to evict - // everything - if leaseCache != nil { - leaseCache.SetShuttingDown(true) - } - cancelFunc() - }) - } - - // Server configuration output - padding := 24 - sort.Strings(infoKeys) - caser := cases.Title(language.English) - c.UI.Output("==> Vault Proxy configuration:\n") - for _, k := range infoKeys { - c.UI.Output(fmt.Sprintf( - "%s%s: %s", - strings.Repeat(" ", padding-len(k)), - caser.String(k), - info[k])) - } - c.UI.Output("") - - // Release the log gate. - c.logGate.Flush() - - // Write out the PID to the file now that server has successfully started - if err := c.storePidFile(config.PidFile); err != nil { - c.UI.Error(fmt.Sprintf("Error storing PID: %s", err)) - return 1 - } - - // Notify systemd that the server is ready (if applicable) - c.notifySystemd(systemd.SdNotifyReady) - - defer func() { - if err := c.removePidFile(config.PidFile); err != nil { - c.UI.Error(fmt.Sprintf("Error deleting the PID file: %s", err)) - } - }() - - var exitCode int - if err := g.Run(); err != nil { - c.logger.Error("runtime error encountered", "error", err) - c.UI.Error("Error encountered during run, refer to logs for more details.") - exitCode = 1 - } - c.notifySystemd(systemd.SdNotifyStopping) - return exitCode -} - -// applyConfigOverrides ensures that the config object accurately reflects the desired -// settings as configured by the user. It applies the relevant config setting based -// on the precedence (env var overrides file config, cli overrides env var). -// It mutates the config object supplied. -func (c *ProxyCommand) applyConfigOverrides(f *FlagSets, config *proxyConfig.Config) { - if config.Vault == nil { - config.Vault = &proxyConfig.Vault{} - } - - f.applyLogConfigOverrides(config.SharedConfig) - - f.Visit(func(fl *flag.Flag) { - if fl.Name == flagNameProxyExitAfterAuth { - config.ExitAfterAuth = c.flagExitAfterAuth - } - }) - - c.setStringFlag(f, config.Vault.Address, &StringVar{ - Name: flagNameAddress, - Target: &c.flagAddress, - Default: "https://127.0.0.1:8200", - EnvVar: api.EnvVaultAddress, - }) - config.Vault.Address = c.flagAddress - c.setStringFlag(f, config.Vault.CACert, &StringVar{ - Name: flagNameCACert, - Target: &c.flagCACert, - Default: "", - EnvVar: api.EnvVaultCACert, - }) - config.Vault.CACert = c.flagCACert - c.setStringFlag(f, config.Vault.CAPath, &StringVar{ - Name: flagNameCAPath, - Target: &c.flagCAPath, - Default: "", - EnvVar: api.EnvVaultCAPath, - }) - config.Vault.CAPath = c.flagCAPath - c.setStringFlag(f, config.Vault.ClientCert, &StringVar{ - Name: flagNameClientCert, - Target: &c.flagClientCert, - Default: "", - EnvVar: api.EnvVaultClientCert, - }) - config.Vault.ClientCert = c.flagClientCert - c.setStringFlag(f, config.Vault.ClientKey, &StringVar{ - Name: flagNameClientKey, - Target: &c.flagClientKey, - Default: "", - EnvVar: api.EnvVaultClientKey, - }) - config.Vault.ClientKey = c.flagClientKey - c.setBoolFlag(f, config.Vault.TLSSkipVerify, &BoolVar{ - Name: flagNameTLSSkipVerify, - Target: &c.flagTLSSkipVerify, - Default: false, - EnvVar: api.EnvVaultSkipVerify, - }) - config.Vault.TLSSkipVerify = c.flagTLSSkipVerify - c.setStringFlag(f, config.Vault.TLSServerName, &StringVar{ - Name: flagTLSServerName, - Target: &c.flagTLSServerName, - Default: "", - EnvVar: api.EnvVaultTLSServerName, - }) - config.Vault.TLSServerName = c.flagTLSServerName -} - -func (c 
*ProxyCommand) notifySystemd(status string) { - sent, err := systemd.SdNotify(false, status) - if err != nil { - c.logger.Error("error notifying systemd", "error", err) - } else { - if sent { - c.logger.Debug("sent systemd notification", "notification", status) - } else { - c.logger.Debug("would have sent systemd notification (systemd not present)", "notification", status) - } - } -} - -func (c *ProxyCommand) setStringFlag(f *FlagSets, configVal string, fVar *StringVar) { - var isFlagSet bool - f.Visit(func(f *flag.Flag) { - if f.Name == fVar.Name { - isFlagSet = true - } - }) - - flagEnvValue, flagEnvSet := os.LookupEnv(fVar.EnvVar) - switch { - case isFlagSet: - // Don't do anything as the flag is already set from the command line - case flagEnvSet: - // Use value from env var - *fVar.Target = flagEnvValue - case configVal != "": - // Use value from config - *fVar.Target = configVal - default: - // Use the default value - *fVar.Target = fVar.Default - } -} - -func (c *ProxyCommand) setBoolFlag(f *FlagSets, configVal bool, fVar *BoolVar) { - var isFlagSet bool - f.Visit(func(f *flag.Flag) { - if f.Name == fVar.Name { - isFlagSet = true - } - }) - - flagEnvValue, flagEnvSet := os.LookupEnv(fVar.EnvVar) - switch { - case isFlagSet: - // Don't do anything as the flag is already set from the command line - case flagEnvSet: - // Use value from env var - *fVar.Target = flagEnvValue != "" - case configVal: - // Use value from config - *fVar.Target = configVal - default: - // Use the default value - *fVar.Target = fVar.Default - } -} - -// storePidFile is used to write out our PID to a file if necessary -func (c *ProxyCommand) storePidFile(pidPath string) error { - // Quit fast if no pidfile - if pidPath == "" { - return nil - } - - // Open the PID file - pidFile, err := os.OpenFile(pidPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600) - if err != nil { - return fmt.Errorf("could not open pid file: %w", err) - } - defer pidFile.Close() - - // Write out the PID - pid := os.Getpid() - _, err = pidFile.WriteString(fmt.Sprintf("%d", pid)) - if err != nil { - return fmt.Errorf("could not write to pid file: %w", err) - } - return nil -} - -// removePidFile is used to cleanup the PID file if necessary -func (c *ProxyCommand) removePidFile(pidPath string) error { - if pidPath == "" { - return nil - } - return os.Remove(pidPath) -} - -func (c *ProxyCommand) handleMetrics() http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet { - logical.RespondError(w, http.StatusMethodNotAllowed, nil) - return - } - - if err := r.ParseForm(); err != nil { - logical.RespondError(w, http.StatusBadRequest, err) - return - } - - format := r.Form.Get("format") - if format == "" { - format = metricsutil.FormatFromRequest(&logical.Request{ - Headers: r.Header, - }) - } - - resp := c.metricsHelper.ResponseForFormat(format) - - status := resp.Data[logical.HTTPStatusCode].(int) - w.Header().Set("Content-Type", resp.Data[logical.HTTPContentType].(string)) - switch v := resp.Data[logical.HTTPRawBody].(type) { - case string: - w.WriteHeader(status) - w.Write([]byte(v)) - case []byte: - w.WriteHeader(status) - w.Write(v) - default: - logical.RespondError(w, http.StatusInternalServerError, fmt.Errorf("wrong response returned")) - } - }) -} - -func (c *ProxyCommand) handleQuit(enabled bool) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if !enabled { - w.WriteHeader(http.StatusNotFound) - return - } - - switch r.Method { - case 
http.MethodPost: - default: - w.WriteHeader(http.StatusMethodNotAllowed) - return - } - - c.logger.Debug("received quit request") - close(c.ShutdownCh) - }) -} - -// newLogger creates a logger based on parsed config field on the Proxy Command struct. -func (c *ProxyCommand) newLogger() (log.InterceptLogger, error) { - if c.config == nil { - return nil, fmt.Errorf("cannot create logger, no config") - } - - var errors error - - // Parse all the log related config - logLevel, err := logging.ParseLogLevel(c.config.LogLevel) - if err != nil { - errors = multierror.Append(errors, err) - } - - logFormat, err := logging.ParseLogFormat(c.config.LogFormat) - if err != nil { - errors = multierror.Append(errors, err) - } - - logRotateDuration, err := parseutil.ParseDurationSecond(c.config.LogRotateDuration) - if err != nil { - errors = multierror.Append(errors, err) - } - - if errors != nil { - return nil, errors - } - - logCfg := &logging.LogConfig{ - Name: "proxy", - LogLevel: logLevel, - LogFormat: logFormat, - LogFilePath: c.config.LogFile, - LogRotateDuration: logRotateDuration, - LogRotateBytes: c.config.LogRotateBytes, - LogRotateMaxFiles: c.config.LogRotateMaxFiles, - } - - l, err := logging.Setup(logCfg, c.logWriter) - if err != nil { - return nil, err - } - - return l, nil -} - -// loadConfig attempts to generate a Proxy config from the file(s) specified. -func (c *ProxyCommand) loadConfig(paths []string) (*proxyConfig.Config, error) { - var errors error - cfg := proxyConfig.NewConfig() - - for _, configPath := range paths { - configFromPath, err := proxyConfig.LoadConfig(configPath) - if err != nil { - errors = multierror.Append(errors, fmt.Errorf("error loading configuration from %s: %w", configPath, err)) - } else { - cfg = cfg.Merge(configFromPath) - } - } - - if errors != nil { - return nil, errors - } - - if err := cfg.ValidateConfig(); err != nil { - return nil, fmt.Errorf("error validating configuration: %w", err) - } - - return cfg, nil -} - -// reloadConfig will attempt to reload the config from file(s) and adjust certain -// config values without requiring a restart of the Vault Proxy. -// If config is retrieved without error it is stored in the config field of the ProxyCommand. -// This operation is not atomic and could result in updated config but partially applied config settings. -// The error returned from this func may be a multierror. -// This function will most likely be called due to Vault Proxy receiving a SIGHUP signal. -// Currently only reloading the following are supported: -// * log level -// * TLS certs for listeners -func (c *ProxyCommand) reloadConfig(paths []string) error { - // Notify systemd that the server is reloading - c.notifySystemd(systemd.SdNotifyReloading) - defer c.notifySystemd(systemd.SdNotifyReady) - - var errors error - - // Reload the config - cfg, err := c.loadConfig(paths) - if err != nil { - // Returning single error as we won't continue with bad config and won't 'commit' it. - return err - } - c.config = cfg - - // Update the log level - err = c.reloadLogLevel() - if err != nil { - errors = multierror.Append(errors, err) - } - - // Update certs - err = c.reloadCerts() - if err != nil { - errors = multierror.Append(errors, err) - } - - return errors -} - -// reloadLogLevel will attempt to update the log level for the logger attached -// to the ProxyCommand struct using the value currently set in config. 
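As a rough sketch of how a caller might drive a reload function such as reloadConfig above from SIGHUP (the watchSighup helper and its arguments are hypothetical; the command's actual signal wiring lives elsewhere):

package proxydemo

import (
	"log"
	"os"
	"os/signal"
	"syscall"
)

// watchSighup is a hypothetical helper: on every SIGHUP it re-reads the given
// config files and applies whatever can change live (log level, listener TLS
// certs), logging any failure instead of exiting.
func watchSighup(configPaths []string, reload func(paths []string) error) {
	sighupCh := make(chan os.Signal, 1)
	signal.Notify(sighupCh, syscall.SIGHUP)
	go func() {
		for range sighupCh {
			if err := reload(configPaths); err != nil {
				log.Printf("config reload failed: %v", err)
			}
		}
	}()
}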
-func (c *ProxyCommand) reloadLogLevel() error { - logLevel, err := logging.ParseLogLevel(c.config.LogLevel) - if err != nil { - return err - } - - c.logger.SetLevel(logLevel) - - return nil -} - -// reloadCerts will attempt to reload certificates using a reload func which -// was provided when the listeners were configured; only funcs that were appended -// to the ProxyCommand slice will be invoked. -// This function returns a multierror type so that every func can report an error -// if it encounters one. -func (c *ProxyCommand) reloadCerts() error { - var errors error - - c.tlsReloadFuncsLock.RLock() - defer c.tlsReloadFuncsLock.RUnlock() - - for _, reloadFunc := range c.tlsReloadFuncs { - // Non-TLS listeners will have a nil reload func. - if reloadFunc != nil { - err := reloadFunc() - if err != nil { - errors = multierror.Append(errors, err) - } - } - } - - return errors -} - -// outputErrors takes an error or multierror and outputs each wrapped error to the UI -func (c *ProxyCommand) outputErrors(err error) { - if err != nil { - if me, ok := err.(*multierror.Error); ok { - for _, err := range me.Errors { - c.UI.Error(err.Error()) - } - } else { - c.UI.Error(err.Error()) - } - } -} diff --git a/command/proxy/config/config.go b/command/proxy/config/config.go deleted file mode 100644 index f760f1eb04731a..00000000000000 --- a/command/proxy/config/config.go +++ /dev/null @@ -1,832 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package config - -import ( - "context" - "errors" - "fmt" - "io" - "net" - "os" - "path/filepath" - "strings" - "time" - - ctconfig "github.com/hashicorp/consul-template/config" - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/go-secure-stdlib/parseutil" - "github.com/hashicorp/hcl" - "github.com/hashicorp/hcl/hcl/ast" - "github.com/hashicorp/vault/command/agentproxyshared" - "github.com/hashicorp/vault/helper/namespace" - "github.com/hashicorp/vault/internalshared/configutil" -) - -// Config is the configuration for Vault Proxy.
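For orientation, a minimal proxy configuration of the kind the Config struct below is decoded from; the addresses, paths, and values here are placeholders, not taken from the patch:

package proxydemo

// exampleProxyConfig mirrors the top-level stanzas captured by Config:
// shared pid_file/listener settings plus vault, api_proxy, and cache blocks.
const exampleProxyConfig = `
pid_file = "./pidfile"

vault {
  address = "https://vault.example.com:8200"
}

listener "tcp" {
  address     = "127.0.0.1:8100"
  tls_disable = true
}

api_proxy {
  enforce_consistency = "always"
}

cache {}
`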
-type Config struct { - *configutil.SharedConfig `hcl:"-"` - - AutoAuth *AutoAuth `hcl:"auto_auth"` - ExitAfterAuth bool `hcl:"exit_after_auth"` - Cache *Cache `hcl:"cache"` - APIProxy *APIProxy `hcl:"api_proxy"` - Vault *Vault `hcl:"vault"` - DisableIdleConns []string `hcl:"disable_idle_connections"` - DisableIdleConnsAPIProxy bool `hcl:"-"` - DisableIdleConnsAutoAuth bool `hcl:"-"` - DisableKeepAlives []string `hcl:"disable_keep_alives"` - DisableKeepAlivesAPIProxy bool `hcl:"-"` - DisableKeepAlivesAutoAuth bool `hcl:"-"` -} - -const ( - DisableIdleConnsEnv = "VAULT_PROXY_DISABLE_IDLE_CONNECTIONS" - DisableKeepAlivesEnv = "VAULT_PROXY_DISABLE_KEEP_ALIVES" -) - -func (c *Config) Prune() { - for _, l := range c.Listeners { - l.RawConfig = nil - l.Profiling.UnusedKeys = nil - l.Telemetry.UnusedKeys = nil - l.CustomResponseHeaders = nil - } - c.FoundKeys = nil - c.UnusedKeys = nil - c.SharedConfig.FoundKeys = nil - c.SharedConfig.UnusedKeys = nil - if c.Telemetry != nil { - c.Telemetry.FoundKeys = nil - c.Telemetry.UnusedKeys = nil - } -} - -type Retry struct { - NumRetries int `hcl:"num_retries"` -} - -// Vault contains configuration for connecting to Vault servers -type Vault struct { - Address string `hcl:"address"` - CACert string `hcl:"ca_cert"` - CAPath string `hcl:"ca_path"` - TLSSkipVerify bool `hcl:"-"` - TLSSkipVerifyRaw interface{} `hcl:"tls_skip_verify"` - ClientCert string `hcl:"client_cert"` - ClientKey string `hcl:"client_key"` - TLSServerName string `hcl:"tls_server_name"` - Retry *Retry `hcl:"retry"` -} - -// transportDialer is an interface that allows passing a custom dialer function -// to an HTTP client's transport config -type transportDialer interface { - // Dial is intended to match https://pkg.go.dev/net#Dialer.Dial - Dial(network, address string) (net.Conn, error) - - // DialContext is intended to match https://pkg.go.dev/net#Dialer.DialContext - DialContext(ctx context.Context, network, address string) (net.Conn, error) -} - -// APIProxy contains any configuration needed for proxy mode -type APIProxy struct { - UseAutoAuthTokenRaw interface{} `hcl:"use_auto_auth_token"` - UseAutoAuthToken bool `hcl:"-"` - ForceAutoAuthToken bool `hcl:"-"` - EnforceConsistency string `hcl:"enforce_consistency"` - WhenInconsistent string `hcl:"when_inconsistent"` -} - -// Cache contains any configuration needed for Cache mode -type Cache struct { - Persist *agentproxyshared.PersistConfig `hcl:"persist"` - InProcDialer transportDialer `hcl:"-"` -} - -// AutoAuth is the configured authentication method and sinks -type AutoAuth struct { - Method *Method `hcl:"-"` - Sinks []*Sink `hcl:"sinks"` - - // NOTE: This is unsupported outside of testing and may disappear at any - // time.
- EnableReauthOnNewCredentials bool `hcl:"enable_reauth_on_new_credentials"` -} - -// Method represents the configuration for the authentication backend -type Method struct { - Type string - MountPath string `hcl:"mount_path"` - WrapTTLRaw interface{} `hcl:"wrap_ttl"` - WrapTTL time.Duration `hcl:"-"` - MinBackoffRaw interface{} `hcl:"min_backoff"` - MinBackoff time.Duration `hcl:"-"` - MaxBackoffRaw interface{} `hcl:"max_backoff"` - MaxBackoff time.Duration `hcl:"-"` - Namespace string `hcl:"namespace"` - ExitOnError bool `hcl:"exit_on_err"` - Config map[string]interface{} -} - -// Sink defines a location to write the authenticated token -type Sink struct { - Type string - WrapTTLRaw interface{} `hcl:"wrap_ttl"` - WrapTTL time.Duration `hcl:"-"` - DHType string `hcl:"dh_type"` - DeriveKey bool `hcl:"derive_key"` - DHPath string `hcl:"dh_path"` - AAD string `hcl:"aad"` - AADEnvVar string `hcl:"aad_env_var"` - Config map[string]interface{} -} - -func NewConfig() *Config { - return &Config{ - SharedConfig: new(configutil.SharedConfig), - } -} - -// Merge merges two Proxy configurations. -func (c *Config) Merge(c2 *Config) *Config { - if c2 == nil { - return c - } - - result := NewConfig() - - result.SharedConfig = c.SharedConfig - if c2.SharedConfig != nil { - result.SharedConfig = c.SharedConfig.Merge(c2.SharedConfig) - } - - result.AutoAuth = c.AutoAuth - if c2.AutoAuth != nil { - result.AutoAuth = c2.AutoAuth - } - - result.Cache = c.Cache - if c2.Cache != nil { - result.Cache = c2.Cache - } - - result.APIProxy = c.APIProxy - if c2.APIProxy != nil { - result.APIProxy = c2.APIProxy - } - - result.DisableMlock = c.DisableMlock - if c2.DisableMlock { - result.DisableMlock = c2.DisableMlock - } - - // For these, ignore the non-specific one and overwrite them all - result.DisableIdleConnsAutoAuth = c.DisableIdleConnsAutoAuth - if c2.DisableIdleConnsAutoAuth { - result.DisableIdleConnsAutoAuth = c2.DisableIdleConnsAutoAuth - } - - result.DisableIdleConnsAPIProxy = c.DisableIdleConnsAPIProxy - if c2.DisableIdleConnsAPIProxy { - result.DisableIdleConnsAPIProxy = c2.DisableIdleConnsAPIProxy - } - - result.DisableKeepAlivesAutoAuth = c.DisableKeepAlivesAutoAuth - if c2.DisableKeepAlivesAutoAuth { - result.DisableKeepAlivesAutoAuth = c2.DisableKeepAlivesAutoAuth - } - - result.DisableKeepAlivesAPIProxy = c.DisableKeepAlivesAPIProxy - if c2.DisableKeepAlivesAPIProxy { - result.DisableKeepAlivesAPIProxy = c2.DisableKeepAlivesAPIProxy - } - - result.ExitAfterAuth = c.ExitAfterAuth - if c2.ExitAfterAuth { - result.ExitAfterAuth = c2.ExitAfterAuth - } - - result.Vault = c.Vault - if c2.Vault != nil { - result.Vault = c2.Vault - } - - result.PidFile = c.PidFile - if c2.PidFile != "" { - result.PidFile = c2.PidFile - } - - return result -} - -// ValidateConfig validates a Vault configuration after it has been fully merged together, to -// ensure that required combinations of configs are there -func (c *Config) ValidateConfig() error { - if c.Cache != nil { - if len(c.Listeners) < 1 { - return fmt.Errorf("enabling the cache requires at least 1 listener to be defined") - } - } - - if c.APIProxy != nil { - if len(c.Listeners) < 1 { - return fmt.Errorf("configuring the api_proxy requires at least 1 listener to be defined") - } - - if c.APIProxy.UseAutoAuthToken { - if c.AutoAuth == nil { - return fmt.Errorf("api_proxy.use_auto_auth_token is true but auto_auth not configured") - } - if c.AutoAuth != nil && c.AutoAuth.Method != nil && c.AutoAuth.Method.WrapTTL > 0 { - return 
fmt.Errorf("api_proxy.use_auto_auth_token is true and auto_auth uses wrapping") - } - } - } - - if c.AutoAuth != nil { - if len(c.AutoAuth.Sinks) == 0 && - (c.APIProxy == nil || !c.APIProxy.UseAutoAuthToken) { - return fmt.Errorf("auto_auth requires at least one sink or api_proxy.use_auto_auth_token=true") - } - } - - if c.AutoAuth == nil && c.Cache == nil && len(c.Listeners) == 0 { - return fmt.Errorf("no auto_auth, cache, or listener block found in config") - } - - return nil -} - -// LoadConfig loads the configuration at the given path, regardless if -// it's a file or directory. -func LoadConfig(path string) (*Config, error) { - fi, err := os.Stat(path) - if err != nil { - return nil, err - } - - if fi.IsDir() { - return LoadConfigDir(path) - } - return LoadConfigFile(path) -} - -// LoadConfigDir loads the configuration at the given path if it's a directory -func LoadConfigDir(dir string) (*Config, error) { - f, err := os.Open(dir) - if err != nil { - return nil, err - } - defer f.Close() - - fi, err := f.Stat() - if err != nil { - return nil, err - } - if !fi.IsDir() { - return nil, fmt.Errorf("configuration path must be a directory: %q", dir) - } - - var files []string - err = nil - for err != io.EOF { - var fis []os.FileInfo - fis, err = f.Readdir(128) - if err != nil && err != io.EOF { - return nil, err - } - - for _, fi := range fis { - // Ignore directories - if fi.IsDir() { - continue - } - - // Only care about files that are valid to load. - name := fi.Name() - skip := true - if strings.HasSuffix(name, ".hcl") { - skip = false - } else if strings.HasSuffix(name, ".json") { - skip = false - } - if skip || isTemporaryFile(name) { - continue - } - - path := filepath.Join(dir, name) - files = append(files, path) - } - } - - result := NewConfig() - for _, f := range files { - config, err := LoadConfigFile(f) - if err != nil { - return nil, fmt.Errorf("error loading %q: %w", f, err) - } - - if result == nil { - result = config - } else { - result = result.Merge(config) - } - } - - return result, nil -} - -// isTemporaryFile returns true or false depending on whether the -// provided file name is a temporary file for the following editors: -// emacs or vim. -func isTemporaryFile(name string) bool { - return strings.HasSuffix(name, "~") || // vim - strings.HasPrefix(name, ".#") || // emacs - (strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#")) // emacs -} - -// LoadConfigFile loads the configuration at the given path if it's a file -func LoadConfigFile(path string) (*Config, error) { - fi, err := os.Stat(path) - if err != nil { - return nil, err - } - - if fi.IsDir() { - return nil, fmt.Errorf("location is a directory, not a file") - } - - // Read the file - d, err := os.ReadFile(path) - if err != nil { - return nil, err - } - - // Parse! 
- obj, err := hcl.Parse(string(d)) - if err != nil { - return nil, err - } - - // Attribute - ast.Walk(obj, func(n ast.Node) (ast.Node, bool) { - if k, ok := n.(*ast.ObjectKey); ok { - k.Token.Pos.Filename = path - } - return n, true - }) - - // Start building the result - result := NewConfig() - if err := hcl.DecodeObject(result, obj); err != nil { - return nil, err - } - - sharedConfig, err := configutil.ParseConfig(string(d)) - if err != nil { - return nil, err - } - - // Pruning custom headers for Vault for now - for _, ln := range sharedConfig.Listeners { - ln.CustomResponseHeaders = nil - } - - result.SharedConfig = sharedConfig - - list, ok := obj.Node.(*ast.ObjectList) - if !ok { - return nil, fmt.Errorf("error parsing: file doesn't contain a root object") - } - - if err := parseAutoAuth(result, list); err != nil { - return nil, fmt.Errorf("error parsing 'auto_auth': %w", err) - } - - if err := parseCache(result, list); err != nil { - return nil, fmt.Errorf("error parsing 'cache':%w", err) - } - - if err := parseAPIProxy(result, list); err != nil { - return nil, fmt.Errorf("error parsing 'api_proxy':%w", err) - } - - err = parseVault(result, list) - if err != nil { - return nil, fmt.Errorf("error parsing 'vault':%w", err) - } - - if result.Vault != nil { - // Set defaults - if result.Vault.Retry == nil { - result.Vault.Retry = &Retry{} - } - switch result.Vault.Retry.NumRetries { - case 0: - result.Vault.Retry.NumRetries = ctconfig.DefaultRetryAttempts - case -1: - result.Vault.Retry.NumRetries = 0 - } - } - - if disableIdleConnsEnv := os.Getenv(DisableIdleConnsEnv); disableIdleConnsEnv != "" { - result.DisableIdleConns, err = parseutil.ParseCommaStringSlice(strings.ToLower(disableIdleConnsEnv)) - if err != nil { - return nil, fmt.Errorf("error parsing environment variable %s: %v", DisableIdleConnsEnv, err) - } - } - - for _, subsystem := range result.DisableIdleConns { - switch subsystem { - case "auto-auth": - result.DisableIdleConnsAutoAuth = true - case "caching", "proxying": - result.DisableIdleConnsAPIProxy = true - case "": - continue - default: - return nil, fmt.Errorf("unknown disable_idle_connections value: %s", subsystem) - } - } - - if disableKeepAlivesEnv := os.Getenv(DisableKeepAlivesEnv); disableKeepAlivesEnv != "" { - result.DisableKeepAlives, err = parseutil.ParseCommaStringSlice(strings.ToLower(disableKeepAlivesEnv)) - if err != nil { - return nil, fmt.Errorf("error parsing environment variable %s: %v", DisableKeepAlivesEnv, err) - } - } - - for _, subsystem := range result.DisableKeepAlives { - switch subsystem { - case "auto-auth": - result.DisableKeepAlivesAutoAuth = true - case "caching", "proxying": - result.DisableKeepAlivesAPIProxy = true - case "": - continue - default: - return nil, fmt.Errorf("unknown disable_keep_alives value: %s", subsystem) - } - } - - return result, nil -} - -func parseVault(result *Config, list *ast.ObjectList) error { - name := "vault" - - vaultList := list.Filter(name) - if len(vaultList.Items) == 0 { - return nil - } - - if len(vaultList.Items) > 1 { - return fmt.Errorf("one and only one %q block is required", name) - } - - item := vaultList.Items[0] - - var v Vault - err := hcl.DecodeObject(&v, item.Val) - if err != nil { - return err - } - - if v.TLSSkipVerifyRaw != nil { - v.TLSSkipVerify, err = parseutil.ParseBool(v.TLSSkipVerifyRaw) - if err != nil { - return err - } - } - - result.Vault = &v - - subs, ok := item.Val.(*ast.ObjectType) - if !ok { - return fmt.Errorf("could not parse %q as an object", name) - } - - if err := 
parseRetry(result, subs.List); err != nil { - return fmt.Errorf("error parsing 'retry': %w", err) - } - - return nil -} - -func parseRetry(result *Config, list *ast.ObjectList) error { - name := "retry" - - retryList := list.Filter(name) - if len(retryList.Items) == 0 { - return nil - } - - if len(retryList.Items) > 1 { - return fmt.Errorf("one and only one %q block is required", name) - } - - item := retryList.Items[0] - - var r Retry - err := hcl.DecodeObject(&r, item.Val) - if err != nil { - return err - } - - result.Vault.Retry = &r - - return nil -} - -func parseAPIProxy(result *Config, list *ast.ObjectList) error { - name := "api_proxy" - - apiProxyList := list.Filter(name) - if len(apiProxyList.Items) == 0 { - return nil - } - - if len(apiProxyList.Items) > 1 { - return fmt.Errorf("one and only one %q block is required", name) - } - - item := apiProxyList.Items[0] - - var apiProxy APIProxy - err := hcl.DecodeObject(&apiProxy, item.Val) - if err != nil { - return err - } - - if apiProxy.UseAutoAuthTokenRaw != nil { - apiProxy.UseAutoAuthToken, err = parseutil.ParseBool(apiProxy.UseAutoAuthTokenRaw) - if err != nil { - // Could be a value of "force" instead of "true"/"false" - switch apiProxy.UseAutoAuthTokenRaw.(type) { - case string: - v := apiProxy.UseAutoAuthTokenRaw.(string) - - if !strings.EqualFold(v, "force") { - return fmt.Errorf("value of 'use_auto_auth_token' can be either true/false/force, %q is an invalid option", apiProxy.UseAutoAuthTokenRaw) - } - apiProxy.UseAutoAuthToken = true - apiProxy.ForceAutoAuthToken = true - - default: - return err - } - } - } - result.APIProxy = &apiProxy - - return nil -} - -func parseCache(result *Config, list *ast.ObjectList) error { - name := "cache" - - cacheList := list.Filter(name) - if len(cacheList.Items) == 0 { - return nil - } - - if len(cacheList.Items) > 1 { - return fmt.Errorf("one and only one %q block is required", name) - } - - item := cacheList.Items[0] - - var c Cache - err := hcl.DecodeObject(&c, item.Val) - if err != nil { - return err - } - - result.Cache = &c - - subs, ok := item.Val.(*ast.ObjectType) - if !ok { - return fmt.Errorf("could not parse %q as an object", name) - } - subList := subs.List - if err := parsePersist(result, subList); err != nil { - return fmt.Errorf("error parsing persist: %w", err) - } - - return nil -} - -func parsePersist(result *Config, list *ast.ObjectList) error { - name := "persist" - - persistList := list.Filter(name) - if len(persistList.Items) == 0 { - return nil - } - - if len(persistList.Items) > 1 { - return fmt.Errorf("only one %q block is required", name) - } - - item := persistList.Items[0] - - var p agentproxyshared.PersistConfig - err := hcl.DecodeObject(&p, item.Val) - if err != nil { - return err - } - - if p.Type == "" { - if len(item.Keys) == 1 { - p.Type = strings.ToLower(item.Keys[0].Token.Value().(string)) - } - if p.Type == "" { - return errors.New("persist type must be specified") - } - } - - result.Cache.Persist = &p - - return nil -} - -func parseAutoAuth(result *Config, list *ast.ObjectList) error { - name := "auto_auth" - - autoAuthList := list.Filter(name) - if len(autoAuthList.Items) == 0 { - return nil - } - if len(autoAuthList.Items) > 1 { - return fmt.Errorf("at most one %q block is allowed", name) - } - - // Get our item - item := autoAuthList.Items[0] - - var a AutoAuth - if err := hcl.DecodeObject(&a, item.Val); err != nil { - return err - } - - result.AutoAuth = &a - - subs, ok := item.Val.(*ast.ObjectType) - if !ok { - return fmt.Errorf("could not parse %q 
as an object", name) - } - subList := subs.List - - if err := parseMethod(result, subList); err != nil { - return fmt.Errorf("error parsing 'method': %w", err) - } - if a.Method == nil { - return fmt.Errorf("no 'method' block found") - } - - if err := parseSinks(result, subList); err != nil { - return fmt.Errorf("error parsing 'sink' stanzas: %w", err) - } - - if result.AutoAuth.Method.WrapTTL > 0 { - if len(result.AutoAuth.Sinks) != 1 { - return fmt.Errorf("error parsing auto_auth: wrapping enabled on auth method and 0 or many sinks defined") - } - - if result.AutoAuth.Sinks[0].WrapTTL > 0 { - return fmt.Errorf("error parsing auto_auth: wrapping enabled both on auth method and sink") - } - } - - if result.AutoAuth.Method.MaxBackoffRaw != nil { - var err error - if result.AutoAuth.Method.MaxBackoff, err = parseutil.ParseDurationSecond(result.AutoAuth.Method.MaxBackoffRaw); err != nil { - return err - } - result.AutoAuth.Method.MaxBackoffRaw = nil - } - - if result.AutoAuth.Method.MinBackoffRaw != nil { - var err error - if result.AutoAuth.Method.MinBackoff, err = parseutil.ParseDurationSecond(result.AutoAuth.Method.MinBackoffRaw); err != nil { - return err - } - result.AutoAuth.Method.MinBackoffRaw = nil - } - - return nil -} - -func parseMethod(result *Config, list *ast.ObjectList) error { - name := "method" - - methodList := list.Filter(name) - if len(methodList.Items) != 1 { - return fmt.Errorf("one and only one %q block is required", name) - } - - // Get our item - item := methodList.Items[0] - - var m Method - if err := hcl.DecodeObject(&m, item.Val); err != nil { - return err - } - - if m.Type == "" { - if len(item.Keys) == 1 { - m.Type = strings.ToLower(item.Keys[0].Token.Value().(string)) - } - if m.Type == "" { - return errors.New("method type must be specified") - } - } - - // Default to Vault's default - if m.MountPath == "" { - m.MountPath = fmt.Sprintf("auth/%s", m.Type) - } - // Standardize on no trailing slash - m.MountPath = strings.TrimSuffix(m.MountPath, "/") - - if m.WrapTTLRaw != nil { - var err error - if m.WrapTTL, err = parseutil.ParseDurationSecond(m.WrapTTLRaw); err != nil { - return err - } - m.WrapTTLRaw = nil - } - - // Canonicalize namespace path if provided - m.Namespace = namespace.Canonicalize(m.Namespace) - - result.AutoAuth.Method = &m - return nil -} - -func parseSinks(result *Config, list *ast.ObjectList) error { - name := "sink" - - sinkList := list.Filter(name) - if len(sinkList.Items) < 1 { - return nil - } - - var ts []*Sink - - for _, item := range sinkList.Items { - var s Sink - if err := hcl.DecodeObject(&s, item.Val); err != nil { - return err - } - - if s.Type == "" { - if len(item.Keys) == 1 { - s.Type = strings.ToLower(item.Keys[0].Token.Value().(string)) - } - if s.Type == "" { - return errors.New("sink type must be specified") - } - } - - if s.WrapTTLRaw != nil { - var err error - if s.WrapTTL, err = parseutil.ParseDurationSecond(s.WrapTTLRaw); err != nil { - return multierror.Prefix(err, fmt.Sprintf("sink.%s", s.Type)) - } - s.WrapTTLRaw = nil - } - - switch s.DHType { - case "": - case "curve25519": - default: - return multierror.Prefix(errors.New("invalid value for 'dh_type'"), fmt.Sprintf("sink.%s", s.Type)) - } - - if s.AADEnvVar != "" { - s.AAD = os.Getenv(s.AADEnvVar) - s.AADEnvVar = "" - } - - switch { - case s.DHPath == "" && s.DHType == "": - if s.AAD != "" { - return multierror.Prefix(errors.New("specifying AAD data without 'dh_type' does not make sense"), fmt.Sprintf("sink.%s", s.Type)) - } - if s.DeriveKey { - return 
multierror.Prefix(errors.New("specifying 'derive_key' data without 'dh_type' does not make sense"), fmt.Sprintf("sink.%s", s.Type)) - } - case s.DHPath != "" && s.DHType != "": - default: - return multierror.Prefix(errors.New("'dh_type' and 'dh_path' must be specified together"), fmt.Sprintf("sink.%s", s.Type)) - } - - ts = append(ts, &s) - } - - result.AutoAuth.Sinks = ts - return nil -} diff --git a/command/proxy/config/config_test.go b/command/proxy/config/config_test.go deleted file mode 100644 index 612d7a6865085d..00000000000000 --- a/command/proxy/config/config_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package config - -import ( - "testing" - - "github.com/go-test/deep" - "github.com/hashicorp/vault/command/agentproxyshared" - "github.com/hashicorp/vault/internalshared/configutil" -) - -// TestLoadConfigFile_ProxyCache tests loading a config file containing a cache -// as well as a valid proxy config. -func TestLoadConfigFile_ProxyCache(t *testing.T) { - config, err := LoadConfigFile("./test-fixtures/config-cache.hcl") - if err != nil { - t.Fatal(err) - } - - expected := &Config{ - SharedConfig: &configutil.SharedConfig{ - PidFile: "./pidfile", - Listeners: []*configutil.Listener{ - { - Type: "unix", - Address: "/path/to/socket", - TLSDisable: true, - SocketMode: "configmode", - SocketUser: "configuser", - SocketGroup: "configgroup", - }, - { - Type: "tcp", - Address: "127.0.0.1:8300", - TLSDisable: true, - }, - { - Type: "tcp", - Address: "127.0.0.1:3000", - Role: "metrics_only", - TLSDisable: true, - }, - { - Type: "tcp", - Role: "default", - Address: "127.0.0.1:8400", - TLSKeyFile: "/path/to/cakey.pem", - TLSCertFile: "/path/to/cacert.pem", - }, - }, - }, - AutoAuth: &AutoAuth{ - Method: &Method{ - Type: "aws", - MountPath: "auth/aws", - Config: map[string]interface{}{ - "role": "foobar", - }, - }, - Sinks: []*Sink{ - { - Type: "file", - DHType: "curve25519", - DHPath: "/tmp/file-foo-dhpath", - AAD: "foobar", - Config: map[string]interface{}{ - "path": "/tmp/file-foo", - }, - }, - }, - }, - APIProxy: &APIProxy{ - EnforceConsistency: "always", - WhenInconsistent: "retry", - UseAutoAuthTokenRaw: true, - UseAutoAuthToken: true, - ForceAutoAuthToken: false, - }, - Cache: &Cache{ - Persist: &agentproxyshared.PersistConfig{ - Type: "kubernetes", - Path: "/vault/agent-cache/", - KeepAfterImport: true, - ExitOnErr: true, - ServiceAccountTokenFile: "/tmp/serviceaccount/token", - }, - }, - Vault: &Vault{ - Address: "http://127.0.0.1:1111", - CACert: "config_ca_cert", - CAPath: "config_ca_path", - TLSSkipVerifyRaw: interface{}("true"), - TLSSkipVerify: true, - ClientCert: "config_client_cert", - ClientKey: "config_client_key", - Retry: &Retry{ - NumRetries: 12, - }, - }, - } - - config.Prune() - if diff := deep.Equal(config, expected); diff != nil { - t.Fatal(diff) - } - - config, err = LoadConfigFile("./test-fixtures/config-cache-embedded-type.hcl") - if err != nil { - t.Fatal(err) - } - expected.Vault.TLSSkipVerifyRaw = interface{}(true) - - config.Prune() - if diff := deep.Equal(config, expected); diff != nil { - t.Fatal(diff) - } -} diff --git a/command/proxy/config/test-fixtures/config-cache-embedded-type.hcl b/command/proxy/config/test-fixtures/config-cache-embedded-type.hcl deleted file mode 100644 index a7d8ef44c59e2c..00000000000000 --- a/command/proxy/config/test-fixtures/config-cache-embedded-type.hcl +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - -pid_file = "./pidfile" - -auto_auth { - method { - type = "aws" - config = { - role = "foobar" - } - } - - sink { - type = "file" - config = { - path = "/tmp/file-foo" - } - aad = "foobar" - dh_type = "curve25519" - dh_path = "/tmp/file-foo-dhpath" - } -} - -api_proxy { - use_auto_auth_token = true - enforce_consistency = "always" - when_inconsistent = "retry" -} - -cache { - persist "kubernetes" { - path = "/vault/agent-cache/" - keep_after_import = true - exit_on_err = true - service_account_token_file = "/tmp/serviceaccount/token" - } -} - -listener { - type = "unix" - address = "/path/to/socket" - tls_disable = true - socket_mode = "configmode" - socket_user = "configuser" - socket_group = "configgroup" -} - -listener { - type = "tcp" - address = "127.0.0.1:8300" - tls_disable = true -} - -listener { - type = "tcp" - address = "127.0.0.1:3000" - tls_disable = true - role = "metrics_only" -} - -listener { - type = "tcp" - role = "default" - address = "127.0.0.1:8400" - tls_key_file = "/path/to/cakey.pem" - tls_cert_file = "/path/to/cacert.pem" -} - -vault { - address = "http://127.0.0.1:1111" - ca_cert = "config_ca_cert" - ca_path = "config_ca_path" - tls_skip_verify = true - client_cert = "config_client_cert" - client_key = "config_client_key" -} diff --git a/command/proxy/config/test-fixtures/config-cache.hcl b/command/proxy/config/test-fixtures/config-cache.hcl deleted file mode 100644 index d770391fe5b09e..00000000000000 --- a/command/proxy/config/test-fixtures/config-cache.hcl +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - -pid_file = "./pidfile" - -auto_auth { - method { - type = "aws" - config = { - role = "foobar" - } - } - - sink { - type = "file" - config = { - path = "/tmp/file-foo" - } - aad = "foobar" - dh_type = "curve25519" - dh_path = "/tmp/file-foo-dhpath" - } -} - -api_proxy { - use_auto_auth_token = true - enforce_consistency = "always" - when_inconsistent = "retry" -} - -cache { - persist = { - type = "kubernetes" - path = "/vault/agent-cache/" - keep_after_import = true - exit_on_err = true - service_account_token_file = "/tmp/serviceaccount/token" - } -} - -listener "unix" { - address = "/path/to/socket" - tls_disable = true - socket_mode = "configmode" - socket_user = "configuser" - socket_group = "configgroup" -} - -listener "tcp" { - address = "127.0.0.1:8300" - tls_disable = true -} - -listener { - type = "tcp" - address = "127.0.0.1:3000" - tls_disable = true - role = "metrics_only" -} - -listener "tcp" { - role = "default" - address = "127.0.0.1:8400" - tls_key_file = "/path/to/cakey.pem" - tls_cert_file = "/path/to/cacert.pem" -} - -vault { - address = "http://127.0.0.1:1111" - ca_cert = "config_ca_cert" - ca_path = "config_ca_path" - tls_skip_verify = "true" - client_cert = "config_client_cert" - client_key = "config_client_key" -} diff --git a/command/proxy/test-fixtures/reload/reload_bar.key b/command/proxy/test-fixtures/reload/reload_bar.key deleted file mode 100644 index 10849fbe1d7f30..00000000000000 --- a/command/proxy/test-fixtures/reload/reload_bar.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAwF7sRAyUiLcd6es6VeaTRUBOusFFGkmKJ5lU351waCJqXFju -Z6i/SQYNAAnnRgotXSTE1fIPjE2kZNH1hvqE5IpTGgAwy50xpjJrrBBI6e9lyKqj -7T8gLVNBvtC0cpQi+pGrszEI0ckDQCSZHqi/PAzcpmLUgh2KMrgagT+YlN35KHtl -/bQ/Fsn+kqykVqNw69n/CDKNKdDHn1qPwiX9q/fTMj3EG6g+3ntKrUOh8V/gHKPz -q8QGP/wIud2K+tTSorVXr/4zx7xgzlbJkCakzcQQiP6K+paPnDRlE8fK+1gRRyR7 
-XCzyp0irUl8G1NjYAR/tVWxiUhlk/jZutb8PpwIDAQABAoIBAEOzJELuindyujxQ -ZD9G3h1I/GwNCFyv9Mbq10u7BIwhUH0fbwdcA7WXQ4v38ERd4IkfH4aLoZ0m1ewF -V/sgvxQO+h/0YTfHImny5KGxOXfaoF92bipYROKuojydBmQsbgLwsRRm9UufCl3Q -g3KewG5JuH112oPQEYq379v8nZ4FxC3Ano1OFBTm9UhHIAX1Dn22kcHOIIw8jCsQ -zp7TZOW+nwtkS41cBwhvV4VIeL6yse2UgbOfRVRwI7B0OtswS5VgW3wysO2mTDKt -V/WCmeht1il/6ZogEHgi/mvDCKpj20wQ1EzGnPdFLdiFJFylf0oufQD/7N/uezbC -is0qJEECgYEA3AE7SeLpe3SZApj2RmE2lcD9/Saj1Y30PznxB7M7hK0sZ1yXEbtS -Qf894iDDD/Cn3ufA4xk/K52CXgAcqvH/h2geG4pWLYsT1mdWhGftprtOMCIvJvzU -8uWJzKdOGVMG7R59wNgEpPDZDpBISjexwQsFo3aw1L/H1/Sa8cdY3a0CgYEA39hB -1oLmGRyE32Q4GF/srG4FqKL1EsbISGDUEYTnaYg2XiM43gu3tC/ikfclk27Jwc2L -m7cA5FxxaEyfoOgfAizfU/uWTAbx9GoXgWsO0hWSN9+YNq61gc5WKoHyrJ/rfrti -y5d7k0OCeBxckLqGDuJqICQ0myiz0El6FU8h5SMCgYEAuhigmiNC9JbwRu40g9v/ -XDVfox9oPmBRVpogdC78DYKeqN/9OZaGQiUxp3GnDni2xyqqUm8srCwT9oeJuF/z -kgpUTV96/hNCuH25BU8UC5Es1jJUSFpdlwjqwx5SRcGhfjnojZMseojwUg1h2MW7 -qls0bc0cTxnaZaYW2qWRWhECgYBrT0cwyQv6GdvxJCBoPwQ9HXmFAKowWC+H0zOX -Onmd8/jsZEJM4J0uuo4Jn8vZxBDg4eL9wVuiHlcXwzP7dYv4BP8DSechh2rS21Ft -b59pQ4IXWw+jl1nYYsyYEDgAXaIN3VNder95N7ICVsZhc6n01MI/qlu1zmt1fOQT -9x2utQKBgHI9SbsfWfbGiu6oLS3+9V1t4dORhj8D8b7z3trvECrD6tPhxoZqtfrH -4apKr3OKRSXk3K+1K6pkMHJHunspucnA1ChXLhzfNF08BSRJkQDGYuaRLS6VGgab -JZTl54bGvO1GkszEBE/9QFcqNVtWGMWXnUPwNNv8t//yJT5rvQil ------END RSA PRIVATE KEY----- diff --git a/command/proxy/test-fixtures/reload/reload_bar.pem b/command/proxy/test-fixtures/reload/reload_bar.pem deleted file mode 100644 index a8217be5c7dfd5..00000000000000 --- a/command/proxy/test-fixtures/reload/reload_bar.pem +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDQzCCAiugAwIBAgIULLCz3mZKmg2xy3rWCud0f1zcmBwwDQYJKoZIhvcNAQEL -BQAwFjEUMBIGA1UEAxMLZXhhbXBsZS5jb20wHhcNMTYwMzEwMDIzNjQ0WhcNMzYw -MzA1MDEzNzE0WjAaMRgwFgYDVQQDEw9iYXIuZXhhbXBsZS5jb20wggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDAXuxEDJSItx3p6zpV5pNFQE66wUUaSYon -mVTfnXBoImpcWO5nqL9JBg0ACedGCi1dJMTV8g+MTaRk0fWG+oTkilMaADDLnTGm -MmusEEjp72XIqqPtPyAtU0G+0LRylCL6kauzMQjRyQNAJJkeqL88DNymYtSCHYoy -uBqBP5iU3fkoe2X9tD8Wyf6SrKRWo3Dr2f8IMo0p0MefWo/CJf2r99MyPcQbqD7e -e0qtQ6HxX+Aco/OrxAY//Ai53Yr61NKitVev/jPHvGDOVsmQJqTNxBCI/or6lo+c -NGUTx8r7WBFHJHtcLPKnSKtSXwbU2NgBH+1VbGJSGWT+Nm61vw+nAgMBAAGjgYQw -gYEwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMB0GA1UdDgQWBBSVoF8F -7qbzSryIFrldurAG78LvSjAfBgNVHSMEGDAWgBRzDNvqF/Tq21OgWs13B5YydZjl -vzAgBgNVHREEGTAXgg9iYXIuZXhhbXBsZS5jb22HBH8AAAEwDQYJKoZIhvcNAQEL -BQADggEBAGmz2N282iT2IaEZvOmzIE4znHGkvoxZmrr/2byq5PskBg9ysyCHfUvw -SFA8U7jWjezKTnGRUu5blB+yZdjrMtB4AePWyEqtkJwVsZ2SPeP+9V2gNYK4iktP -UF3aIgBbAbw8rNuGIIB0T4D+6Zyo9Y3MCygs6/N4bRPZgLhewWn1ilklfnl3eqaC -a+JY1NBuTgCMa28NuC+Hy3mCveqhI8tFNiOthlLdgAEbuQaOuNutAG73utZ2aq6Q -W4pajFm3lEf5zt7Lo6ZCFtY/Q8jjURJ9e4O7VjXcqIhBM5bSMI6+fgQyOH0SLboj -RNanJ2bcyF1iPVyPBGzV3dF0ngYzxEY= ------END CERTIFICATE----- diff --git a/command/proxy/test-fixtures/reload/reload_ca.pem b/command/proxy/test-fixtures/reload/reload_ca.pem deleted file mode 100644 index 72a74440c48202..00000000000000 --- a/command/proxy/test-fixtures/reload/reload_ca.pem +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDNTCCAh2gAwIBAgIUBeVo+Ce2BrdRT1cogKvJLtdOky8wDQYJKoZIhvcNAQEL -BQAwFjEUMBIGA1UEAxMLZXhhbXBsZS5jb20wHhcNMTYwMzEwMDIzNTM4WhcNMzYw -MzA1MDIzNjA4WjAWMRQwEgYDVQQDEwtleGFtcGxlLmNvbTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAPTQGWPRIOECGeJB6tR/ftvvtioC9f84fY2QdJ5k -JBupXjPAGYKgS4MGzyT5bz9yY400tCtmh6h7p9tZwHl/TElTugtLQ/8ilMbJTiOM -SiyaMDPHiMJJYKTjm9bu6bKeU1qPZ0Cryes4rygbqs7w2XPgA2RxNmDh7JdX7/h+ 
-VB5onBmv8g4WFSayowGyDcJWWCbu5yv6ZdH1bqQjgRzQ5xp17WXNmvlzdp2vate/ -9UqPdA8sdJzW/91Gvmros0o/FnG7c2pULhk22wFqO8t2HRjKb3nuxALEJvqoPvad -KjpDTaq1L1ZzxcB7wvWyhy/lNLZL7jiNWy0mN1YB0UpSWdECAwEAAaN7MHkwDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHMM2+oX9Orb -U6BazXcHljJ1mOW/MB8GA1UdIwQYMBaAFHMM2+oX9OrbU6BazXcHljJ1mOW/MBYG -A1UdEQQPMA2CC2V4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQAp17XsOaT9 -hculRqrFptn3+zkH3HrIckHm+28R5xYT8ASFXFcLFugGizJAXVL5lvsRVRIwCoOX -Nhi8XSNEFP640VbHcEl81I84bbRIIDS+Yheu6JDZGemTaDYLv1J3D5SHwgoM+nyf -oTRgotUCIXcwJHmTpWEUkZFKuqBxsoTGzk0jO8wOP6xoJkzxVVG5PvNxs924rxY8 -Y8iaLdDfMeT7Pi0XIliBa/aSp/iqSW8XKyJl5R5vXg9+DOgZUrVzIxObaF5RBl/a -mJOeklJBdNVzQm5+iMpO42lu0TA9eWtpP+YiUEXU17XDvFeQWOocFbQ1Peo0W895 -XRz2GCwCNyvW ------END CERTIFICATE----- diff --git a/command/proxy/test-fixtures/reload/reload_foo.key b/command/proxy/test-fixtures/reload/reload_foo.key deleted file mode 100644 index 86e6cce63e6478..00000000000000 --- a/command/proxy/test-fixtures/reload/reload_foo.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpgIBAAKCAQEAzNyVieSti9XBb5/celB5u8YKRJv3mQS9A4/X0mqY1ePznt1i -ilG7OmG0yM2VAk0ceIAQac3Bsn74jxn2cDlrrVniPXcNgYtMtW0kRqNEo4doo4EX -xZguS9vNBu29useHhif1TGX/pA3dgvaVycUCjzTEVk6qI8UEehMK6gEGZb7nOr0A -A9nipSqoeHpDLe3a4KVqj1vtlJKUvD2i1MuBuQ130cB1K9rufLCShGu7mEgzEosc -gr+K3Bf03IejbeVRyIfLtgj1zuvV1katec75UqRA/bsvt5G9JfJqiZ9mwFN0vp3g -Cr7pdQBSBQ2q4yf9s8CuY5c5w9fl3F8f5QFQoQIDAQABAoIBAQCbCb1qNFRa5ZSV -I8i6ELlwMDqJHfhOJ9XcIjpVljLAfNlcu3Ld92jYkCU/asaAjVckotbJG9yhd5Io -yp9E40/oS4P6vGTOS1vsWgMAKoPBtrKsOwCAm+E9q8UIn1fdSS/5ibgM74x+3bds -a62Em8KKGocUQkhk9a+jq1GxMsFisbHRxEHvClLmDMgGnW3FyGmWwT6yZLPSC0ey -szmmjt3ouP8cLAOmSjzcQBMmEZpQMCgR6Qckg6nrLQAGzZyTdCd875wbGA57DpWX -Lssn95+A5EFvr/6b7DkXeIFCrYBFFa+UQN3PWGEQ6Zjmiw4VgV2vO8yX2kCLlUhU -02bL393ZAoGBAPXPD/0yWINbKUPcRlx/WfWQxfz0bu50ytwIXzVK+pRoAMuNqehK -BJ6kNzTTBq40u+IZ4f5jbLDulymR+4zSkirLE7CyWFJOLNI/8K4Pf5DJUgNdrZjJ -LCtP9XRdxiPatQF0NGfdgHlSJh+/CiRJP4AgB17AnB/4z9/M0ZlJGVrzAoGBANVa -69P3Rp/WPBQv0wx6f0tWppJolWekAHKcDIdQ5HdOZE5CPAYSlTrTUW3uJuqMwU2L -M0Er2gIPKWIR5X+9r7Fvu9hQW6l2v3xLlcrGPiapp3STJvuMxzhRAmXmu3bZfVn1 -Vn7Vf1jPULHtTFSlNFEvYG5UJmygK9BeyyVO5KMbAoGBAMCyAibLQPg4jrDUDZSV -gUAwrgUO2ae1hxHWvkxY6vdMUNNByuB+pgB3W4/dnm8Sh/dHsxJpftt1Lqs39ar/ -p/ZEHLt4FCTxg9GOrm7FV4t5RwG8fko36phJpnIC0UFqQltRbYO+8OgqrhhU+u5X -PaCDe0OcWsf1lYAsYGN6GpZhAoGBAMJ5Ksa9+YEODRs1cIFKUyd/5ztC2xRqOAI/ -3WemQ2nAacuvsfizDZVeMzYpww0+maAuBt0btI719PmwaGmkpDXvK+EDdlmkpOwO -FY6MXvBs6fdnfjwCWUErDi2GQFAX9Jt/9oSL5JU1+08DhvUM1QA/V/2Y9KFE6kr3 -bOIn5F4LAoGBAKQzH/AThDGhT3hwr4ktmReF3qKxBgxzjVa8veXtkY5VWwyN09iT -jnTTt6N1CchZoK5WCETjdzNYP7cuBTcV4d3bPNRiJmxXaNVvx3Tlrk98OiffT8Qa -5DO/Wfb43rNHYXBjU6l0n2zWcQ4PUSSbu0P0bM2JTQPRCqSthXvSHw2P ------END RSA PRIVATE KEY----- diff --git a/command/proxy/test-fixtures/reload/reload_foo.pem b/command/proxy/test-fixtures/reload/reload_foo.pem deleted file mode 100644 index c8b868bcd0f08d..00000000000000 --- a/command/proxy/test-fixtures/reload/reload_foo.pem +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDQzCCAiugAwIBAgIUFVW6i/M+yJUsDrXWgRKO/Dnb+L4wDQYJKoZIhvcNAQEL -BQAwFjEUMBIGA1UEAxMLZXhhbXBsZS5jb20wHhcNMTYwMzEwMDIzNjA1WhcNMzYw -MzA1MDEzNjM1WjAaMRgwFgYDVQQDEw9mb28uZXhhbXBsZS5jb20wggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDM3JWJ5K2L1cFvn9x6UHm7xgpEm/eZBL0D -j9fSapjV4/Oe3WKKUbs6YbTIzZUCTRx4gBBpzcGyfviPGfZwOWutWeI9dw2Bi0y1 -bSRGo0Sjh2ijgRfFmC5L280G7b26x4eGJ/VMZf+kDd2C9pXJxQKPNMRWTqojxQR6 -EwrqAQZlvuc6vQAD2eKlKqh4ekMt7drgpWqPW+2UkpS8PaLUy4G5DXfRwHUr2u58 
-sJKEa7uYSDMSixyCv4rcF/Tch6Nt5VHIh8u2CPXO69XWRq15zvlSpED9uy+3kb0l -8mqJn2bAU3S+neAKvul1AFIFDarjJ/2zwK5jlznD1+XcXx/lAVChAgMBAAGjgYQw -gYEwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMB0GA1UdDgQWBBRNJoOJ -dnazDiuqLhV6truQ4cRe9jAfBgNVHSMEGDAWgBRzDNvqF/Tq21OgWs13B5YydZjl -vzAgBgNVHREEGTAXgg9mb28uZXhhbXBsZS5jb22HBH8AAAEwDQYJKoZIhvcNAQEL -BQADggEBAHzv67mtbxMWcuMsxCFBN1PJNAyUDZVCB+1gWhk59EySbVg81hWJDCBy -fl3TKjz3i7wBGAv+C2iTxmwsSJbda22v8JQbuscXIfLFbNALsPzF+J0vxAgJs5Gc -sDbfJ7EQOIIOVKQhHLYnQoLnigSSPc1kd0JjYyHEBjgIaSuXgRRTBAeqLiBMx0yh -RKL1lQ+WoBU/9SXUZZkwokqWt5G7khi5qZkNxVXZCm8VGPg0iywf6gGyhI1SU5S2 -oR219S6kA4JY/stw1qne85/EmHmoImHGt08xex3GoU72jKAjsIpqRWopcD/+uene -Tc9nn3fTQW/Z9fsoJ5iF5OdJnDEswqE= ------END CERTIFICATE----- diff --git a/command/proxy_test.go b/command/proxy_test.go deleted file mode 100644 index 01aba59dbc8f37..00000000000000 --- a/command/proxy_test.go +++ /dev/null @@ -1,1254 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package command - -import ( - "crypto/tls" - "crypto/x509" - "fmt" - "net/http" - "os" - "path/filepath" - "reflect" - "strings" - "sync" - "testing" - "time" - - "github.com/hashicorp/go-hclog" - vaultjwt "github.com/hashicorp/vault-plugin-auth-jwt" - logicalKv "github.com/hashicorp/vault-plugin-secrets-kv" - "github.com/hashicorp/vault/api" - credAppRole "github.com/hashicorp/vault/builtin/credential/approle" - "github.com/hashicorp/vault/command/agent" - proxyConfig "github.com/hashicorp/vault/command/proxy/config" - "github.com/hashicorp/vault/helper/testhelpers/minimal" - "github.com/hashicorp/vault/helper/useragent" - vaulthttp "github.com/hashicorp/vault/http" - "github.com/hashicorp/vault/sdk/helper/logging" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/vault" - "github.com/mitchellh/cli" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func testProxyCommand(tb testing.TB, logger hclog.Logger) (*cli.MockUi, *ProxyCommand) { - tb.Helper() - - ui := cli.NewMockUi() - return ui, &ProxyCommand{ - BaseCommand: &BaseCommand{ - UI: ui, - }, - ShutdownCh: MakeShutdownCh(), - SighupCh: MakeSighupCh(), - logger: logger, - startedCh: make(chan struct{}, 5), - reloadedCh: make(chan struct{}, 5), - } -} - -// TestProxy_ExitAfterAuth tests the exit_after_auth flag, provided both -// as config and via -exit-after-auth. 
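As a sketch of the two equivalent ways the test below turns on exit-after-auth (the auto_auth method, role, and file paths are placeholders):

package proxydemo

// Either set exit_after_auth in the config file...
const exitAfterAuthConfig = `
exit_after_auth = true

auto_auth {
  method "jwt" {
    config = {
      role = "test"
      path = "/tmp/jwt-token"
    }
  }

  sink "file" {
    config = {
      path = "/tmp/proxy-token-sink"
    }
  }
}
`

// ...or omit it from the file and pass the flag on the command line instead:
//
//	vault proxy -config=proxy.hcl -exit-after-auth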
-func TestProxy_ExitAfterAuth(t *testing.T) { - t.Run("via_config", func(t *testing.T) { - testProxyExitAfterAuth(t, false) - }) - - t.Run("via_flag", func(t *testing.T) { - testProxyExitAfterAuth(t, true) - }) -} - -func testProxyExitAfterAuth(t *testing.T, viaFlag bool) { - logger := logging.NewVaultLogger(hclog.Trace) - coreConfig := &vault.CoreConfig{ - Logger: logger, - CredentialBackends: map[string]logical.Factory{ - "jwt": vaultjwt.Factory, - }, - } - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - defer cluster.Cleanup() - - vault.TestWaitActive(t, cluster.Cores[0].Core) - client := cluster.Cores[0].Client - - // Setup Vault - err := client.Sys().EnableAuthWithOptions("jwt", &api.EnableAuthOptions{ - Type: "jwt", - }) - if err != nil { - t.Fatal(err) - } - - _, err = client.Logical().Write("auth/jwt/config", map[string]interface{}{ - "bound_issuer": "https://team-vault.auth0.com/", - "jwt_validation_pubkeys": agent.TestECDSAPubKey, - "jwt_supported_algs": "ES256", - }) - if err != nil { - t.Fatal(err) - } - - _, err = client.Logical().Write("auth/jwt/role/test", map[string]interface{}{ - "role_type": "jwt", - "bound_subject": "r3qXcK2bix9eFECzsU3Sbmh0K16fatW6@clients", - "bound_audiences": "https://vault.plugin.auth.jwt.test", - "user_claim": "https://vault/user", - "groups_claim": "https://vault/groups", - "policies": "test", - "period": "3s", - }) - if err != nil { - t.Fatal(err) - } - - dir := t.TempDir() - inf, err := os.CreateTemp(dir, "auth.jwt.test.") - if err != nil { - t.Fatal(err) - } - in := inf.Name() - inf.Close() - // We remove these files in this test since we don't need the files, we just need - // a non-conflicting file name for the config. 
- os.Remove(in) - t.Logf("input: %s", in) - - sink1f, err := os.CreateTemp(dir, "sink1.jwt.test.") - if err != nil { - t.Fatal(err) - } - sink1 := sink1f.Name() - sink1f.Close() - os.Remove(sink1) - t.Logf("sink1: %s", sink1) - - sink2f, err := os.CreateTemp(dir, "sink2.jwt.test.") - if err != nil { - t.Fatal(err) - } - sink2 := sink2f.Name() - sink2f.Close() - os.Remove(sink2) - t.Logf("sink2: %s", sink2) - - conff, err := os.CreateTemp(dir, "conf.jwt.test.") - if err != nil { - t.Fatal(err) - } - conf := conff.Name() - conff.Close() - os.Remove(conf) - t.Logf("config: %s", conf) - - jwtToken, _ := agent.GetTestJWT(t) - if err := os.WriteFile(in, []byte(jwtToken), 0o600); err != nil { - t.Fatal(err) - } else { - logger.Trace("wrote test jwt", "path", in) - } - - exitAfterAuthTemplText := "exit_after_auth = true" - if viaFlag { - exitAfterAuthTemplText = "" - } - - config := ` -%s - -auto_auth { - method { - type = "jwt" - config = { - role = "test" - path = "%s" - } - } - - sink { - type = "file" - config = { - path = "%s" - } - } - - sink "file" { - config = { - path = "%s" - } - } -} -` - - config = fmt.Sprintf(config, exitAfterAuthTemplText, in, sink1, sink2) - if err := os.WriteFile(conf, []byte(config), 0o600); err != nil { - t.Fatal(err) - } else { - logger.Trace("wrote test config", "path", conf) - } - - doneCh := make(chan struct{}) - go func() { - ui, cmd := testProxyCommand(t, logger) - cmd.client = client - - args := []string{"-config", conf} - if viaFlag { - args = append(args, "-exit-after-auth") - } - - code := cmd.Run(args) - if code != 0 { - t.Errorf("expected %d to be %d", code, 0) - t.Logf("output from proxy:\n%s", ui.OutputWriter.String()) - t.Logf("error from proxy:\n%s", ui.ErrorWriter.String()) - } - close(doneCh) - }() - - select { - case <-doneCh: - break - case <-time.After(1 * time.Minute): - t.Fatal("timeout reached while waiting for proxy to exit") - } - - sink1Bytes, err := os.ReadFile(sink1) - if err != nil { - t.Fatal(err) - } - if len(sink1Bytes) == 0 { - t.Fatal("got no output from sink 1") - } - - sink2Bytes, err := os.ReadFile(sink2) - if err != nil { - t.Fatal(err) - } - if len(sink2Bytes) == 0 { - t.Fatal("got no output from sink 2") - } - - if string(sink1Bytes) != string(sink2Bytes) { - t.Fatal("sink 1/2 values don't match") - } -} - -// TestProxy_AutoAuth_UserAgent tests that the User-Agent sent -// to Vault by Vault Proxy is correct when performing Auto-Auth. -// Uses the custom handler userAgentHandler (defined above) so -// that Vault validates the User-Agent on requests sent by Proxy. 
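The userAgentHandler these tests rely on is defined earlier in this file; as a rough sketch of the kind of check such a handler performs (the struct and field names below are illustrative, not the real type):

package proxydemo

import (
	"net/http"
	"strings"
	"testing"
)

// uaCheckingHandler fails the test if a request's User-Agent does not contain
// the expected value, then hands the request on to the real Vault handler.
type uaCheckingHandler struct {
	t        *testing.T
	expected string
	next     http.Handler
}

func (h *uaCheckingHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if got := r.UserAgent(); !strings.Contains(got, h.expected) {
		h.t.Errorf("unexpected User-Agent: got %q, want substring %q", got, h.expected)
	}
	h.next.ServeHTTP(w, r)
}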
-func TestProxy_AutoAuth_UserAgent(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - var h userAgentHandler - cluster := vault.NewTestCluster(t, &vault.CoreConfig{ - Logger: logger, - CredentialBackends: map[string]logical.Factory{ - "approle": credAppRole.Factory, - }, - }, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.ProxyAutoAuthString() - h.requestMethodToCheck = "PUT" - h.pathToCheck = "auth/approle/login" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Enable the approle auth method - req := serverClient.NewRequest("POST", "/v1/sys/auth/approle") - req.BodyBytes = []byte(`{ - "type": "approle" - }`) - request(t, serverClient, req, 204) - - // Create a named role - req = serverClient.NewRequest("PUT", "/v1/auth/approle/role/test-role") - req.BodyBytes = []byte(`{ - "secret_id_num_uses": "10", - "secret_id_ttl": "1m", - "token_max_ttl": "1m", - "token_num_uses": "10", - "token_ttl": "1m", - "policies": "default" - }`) - request(t, serverClient, req, 204) - - // Fetch the RoleID of the named role - req = serverClient.NewRequest("GET", "/v1/auth/approle/role/test-role/role-id") - body := request(t, serverClient, req, 200) - data := body["data"].(map[string]interface{}) - roleID := data["role_id"].(string) - - // Get a SecretID issued against the named role - req = serverClient.NewRequest("PUT", "/v1/auth/approle/role/test-role/secret-id") - body = request(t, serverClient, req, 200) - data = body["data"].(map[string]interface{}) - secretID := data["secret_id"].(string) - - // Write the RoleID and SecretID to temp files - roleIDPath := makeTempFile(t, "role_id.txt", roleID+"\n") - secretIDPath := makeTempFile(t, "secret_id.txt", secretID+"\n") - defer os.Remove(roleIDPath) - defer os.Remove(secretIDPath) - - sinkf, err := os.CreateTemp("", "sink.test.") - if err != nil { - t.Fatal(err) - } - sink := sinkf.Name() - sinkf.Close() - os.Remove(sink) - - autoAuthConfig := fmt.Sprintf(` -auto_auth { - method "approle" { - mount_path = "auth/approle" - config = { - role_id_file_path = "%s" - secret_id_file_path = "%s" - } - } - - sink "file" { - config = { - path = "%s" - } - } -}`, roleIDPath, secretIDPath, sink) - - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -api_proxy { - use_auto_auth_token = true -} -%s -%s -`, serverClient.Address(), listenConfig, autoAuthConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Unset the environment variable so that proxy picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - // Start proxy - _, cmd := testProxyCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - // Validate that the auto-auth token has been correctly attained - // and works for LookupSelf - conf := api.DefaultConfig() - conf.Address = "http://" + listenAddr - proxyClient, err := api.NewClient(conf) - if 
err != nil { - t.Fatalf("err: %s", err) - } - - proxyClient.SetToken("") - err = proxyClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - // Wait for the token to be sent to syncs and be available to be used - time.Sleep(5 * time.Second) - - req = proxyClient.NewRequest("GET", "/v1/auth/token/lookup-self") - body = request(t, proxyClient, req, 200) - - close(cmd.ShutdownCh) - wg.Wait() -} - -// TestProxy_APIProxyWithoutCache_UserAgent tests that the User-Agent sent -// to Vault by Vault Proxy is correct using the API proxy without -// the cache configured. Uses the custom handler -// userAgentHandler struct defined in this test package, so that Vault validates the -// User-Agent on requests sent by Proxy. -func TestProxy_APIProxyWithoutCache_UserAgent(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - userAgentForProxiedClient := "proxied-client" - var h userAgentHandler - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.ProxyStringWithProxiedUserAgent(userAgentForProxiedClient) - h.pathToCheck = "/v1/auth/token/lookup-self" - h.requestMethodToCheck = "GET" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that proxy picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -%s -`, serverClient.Address(), listenConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Start the proxy - _, cmd := testProxyCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - proxyClient, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - proxyClient.AddHeader("User-Agent", userAgentForProxiedClient) - proxyClient.SetToken(serverClient.Token()) - proxyClient.SetMaxRetries(0) - err = proxyClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - _, err = proxyClient.Auth().Token().LookupSelf() - if err != nil { - t.Fatal(err) - } - - close(cmd.ShutdownCh) - wg.Wait() -} - -// TestProxy_APIProxyWithCache_UserAgent tests that the User-Agent sent -// to Vault by Vault Proxy is correct using the API proxy with -// the cache configured. Uses the custom handler -// userAgentHandler struct defined in this test package, so that Vault validates the -// User-Agent on requests sent by Proxy. 
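The environment handling in the tests below works because defer evaluates its arguments immediately, so the original VAULT_ADDR value is captured before Unsetenv clears it for the duration of the test. A small sketch of the same save-and-restore idiom (the helper name is made up):

package proxydemo

import "os"

// withoutEnv runs fn with the given environment variable unset, restoring the
// previous value (or its absence) afterwards.
func withoutEnv(key string, fn func()) {
	old, had := os.LookupEnv(key)
	os.Unsetenv(key)
	defer func() {
		if had {
			os.Setenv(key, old)
		} else {
			os.Unsetenv(key)
		}
	}()
	fn()
}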
-func TestProxy_APIProxyWithCache_UserAgent(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - userAgentForProxiedClient := "proxied-client" - var h userAgentHandler - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc( - func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.userAgentToCheckFor = useragent.ProxyStringWithProxiedUserAgent(userAgentForProxiedClient) - h.pathToCheck = "/v1/auth/token/lookup-self" - h.requestMethodToCheck = "GET" - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that proxy picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - cacheConfig := ` -cache { -}` - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -%s -%s -`, serverClient.Address(), listenConfig, cacheConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Start the proxy - _, cmd := testProxyCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - proxyClient, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - proxyClient.AddHeader("User-Agent", userAgentForProxiedClient) - proxyClient.SetToken(serverClient.Token()) - proxyClient.SetMaxRetries(0) - err = proxyClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - _, err = proxyClient.Auth().Token().LookupSelf() - if err != nil { - t.Fatal(err) - } - - close(cmd.ShutdownCh) - wg.Wait() -} - -// TestProxy_Cache_DynamicSecret Tests that the cache successfully caches a dynamic secret -// going through the Proxy, -func TestProxy_Cache_DynamicSecret(t *testing.T) { - logger := logging.NewVaultLogger(hclog.Trace) - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - defer cluster.Cleanup() - - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that proxy picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - cacheConfig := ` -cache { -} -` - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} -%s -%s -`, serverClient.Address(), cacheConfig, listenConfig) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - // Start proxy - _, cmd := testProxyCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - proxyClient, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - 
proxyClient.SetToken(serverClient.Token()) - proxyClient.SetMaxRetries(0) - err = proxyClient.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - renewable := true - tokenCreateRequest := &api.TokenCreateRequest{ - Policies: []string{"default"}, - TTL: "30m", - Renewable: &renewable, - } - - // This was the simplest test I could find to trigger the caching behaviour, - // i.e. the most concise I could make the test that I can tell - // creating an orphan token returns Auth, is renewable, and isn't a token - // that's managed elsewhere (since it's an orphan) - secret, err := proxyClient.Auth().Token().CreateOrphan(tokenCreateRequest) - if err != nil { - t.Fatal(err) - } - if secret == nil || secret.Auth == nil { - t.Fatalf("secret not as expected: %v", secret) - } - - token := secret.Auth.ClientToken - - secret, err = proxyClient.Auth().Token().CreateOrphan(tokenCreateRequest) - if err != nil { - t.Fatal(err) - } - if secret == nil || secret.Auth == nil { - t.Fatalf("secret not as expected: %v", secret) - } - - token2 := secret.Auth.ClientToken - - if token != token2 { - t.Fatalf("token create response not cached when it should have been, as tokens differ") - } - - close(cmd.ShutdownCh) - wg.Wait() -} - -// TestProxy_ApiProxy_Retry Tests the retry functionalities of Vault Proxy's API Proxy -func TestProxy_ApiProxy_Retry(t *testing.T) { - //---------------------------------------------------- - // Start the server and proxy - //---------------------------------------------------- - logger := logging.NewVaultLogger(hclog.Trace) - var h handler - cluster := vault.NewTestCluster(t, - &vault.CoreConfig{ - Logger: logger, - CredentialBackends: map[string]logical.Factory{ - "approle": credAppRole.Factory, - }, - LogicalBackends: map[string]logical.Factory{ - "kv": logicalKv.Factory, - }, - }, - &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: vaulthttp.HandlerFunc(func(properties *vault.HandlerProperties) http.Handler { - h.props = properties - h.t = t - return &h - }), - }) - cluster.Start() - defer cluster.Cleanup() - - vault.TestWaitActive(t, cluster.Cores[0].Core) - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that proxy picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - os.Unsetenv(api.EnvVaultAddress) - - _, err := serverClient.Logical().Write("secret/foo", map[string]interface{}{ - "bar": "baz", - }) - if err != nil { - t.Fatal(err) - } - - intRef := func(i int) *int { - return &i - } - // start test cases here - testCases := map[string]struct { - retries *int - expectError bool - }{ - "none": { - retries: intRef(-1), - expectError: true, - }, - "one": { - retries: intRef(1), - expectError: true, - }, - "two": { - retries: intRef(2), - expectError: false, - }, - "missing": { - retries: nil, - expectError: false, - }, - "default": { - retries: intRef(0), - expectError: false, - }, - } - - for tcname, tc := range testCases { - t.Run(tcname, func(t *testing.T) { - h.failCount = 2 - - cacheConfig := ` -cache { -} -` - listenAddr := generateListenerAddress(t) - listenConfig := fmt.Sprintf(` -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - - var retryConf string - if tc.retries != nil { - retryConf = fmt.Sprintf("retry { num_retries = %d }", *tc.retries) - } - - config := fmt.Sprintf(` -vault { - address = "%s" - %s - tls_skip_verify = true -} -%s -%s -`, serverClient.Address(), retryConf, cacheConfig, listenConfig) - configPath := 
makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - _, cmd := testProxyCommand(t, logger) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - client, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - client.SetToken(serverClient.Token()) - client.SetMaxRetries(0) - err = client.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - secret, err := client.Logical().Read("secret/foo") - switch { - case (err != nil || secret == nil) && tc.expectError: - case (err == nil || secret != nil) && !tc.expectError: - default: - t.Fatalf("%s expectError=%v error=%v secret=%v", tcname, tc.expectError, err, secret) - } - if secret != nil && secret.Data["foo"] != nil { - val := secret.Data["foo"].(map[string]interface{}) - if !reflect.DeepEqual(val, map[string]interface{}{"bar": "baz"}) { - t.Fatalf("expected key 'foo' to yield bar=baz, got: %v", val) - } - } - time.Sleep(time.Second) - - close(cmd.ShutdownCh) - wg.Wait() - }) - } -} - -// TestProxy_Metrics tests that metrics are being properly reported. -func TestProxy_Metrics(t *testing.T) { - // Start a vault server - logger := logging.NewVaultLogger(hclog.Trace) - cluster := vault.NewTestCluster(t, - &vault.CoreConfig{ - Logger: logger, - }, - &vault.TestClusterOptions{ - HandlerFunc: vaulthttp.Handler, - }) - cluster.Start() - defer cluster.Cleanup() - vault.TestWaitActive(t, cluster.Cores[0].Core) - serverClient := cluster.Cores[0].Client - - // Create a config file - listenAddr := generateListenerAddress(t) - config := fmt.Sprintf(` -cache {} - -listener "tcp" { - address = "%s" - tls_disable = true -} -`, listenAddr) - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - ui, cmd := testProxyCommand(t, logger) - cmd.client = serverClient - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - code := cmd.Run([]string{"-config", configPath}) - if code != 0 { - t.Errorf("non-zero return code when running proxy: %d", code) - t.Logf("STDOUT from proxy:\n%s", ui.OutputWriter.String()) - t.Logf("STDERR from proxy:\n%s", ui.ErrorWriter.String()) - } - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - // defer proxy shutdown - defer func() { - cmd.ShutdownCh <- struct{}{} - wg.Wait() - }() - - conf := api.DefaultConfig() - conf.Address = "http://" + listenAddr - proxyClient, err := api.NewClient(conf) - if err != nil { - t.Fatalf("err: %s", err) - } - - req := proxyClient.NewRequest("GET", "/proxy/v1/metrics") - body := request(t, proxyClient, req, 200) - keys := []string{} - for k := range body { - keys = append(keys, k) - } - require.ElementsMatch(t, keys, []string{ - "Counters", - "Samples", - "Timestamp", - "Gauges", - "Points", - }) -} - -// TestProxy_QuitAPI Tests the /proxy/v1/quit API that can be enabled for the proxy. 
-func TestProxy_QuitAPI(t *testing.T) { - cluster := minimal.NewTestSoloCluster(t, nil) - serverClient := cluster.Cores[0].Client - - // Unset the environment variable so that proxy picks up the right test - // cluster address - defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress)) - err := os.Unsetenv(api.EnvVaultAddress) - if err != nil { - t.Fatal(err) - } - - listenAddr := generateListenerAddress(t) - listenAddr2 := generateListenerAddress(t) - config := fmt.Sprintf(` -vault { - address = "%s" - tls_skip_verify = true -} - -listener "tcp" { - address = "%s" - tls_disable = true -} - -listener "tcp" { - address = "%s" - tls_disable = true - proxy_api { - enable_quit = true - } -} - -cache {} -`, serverClient.Address(), listenAddr, listenAddr2) - - configPath := makeTempFile(t, "config.hcl", config) - defer os.Remove(configPath) - - _, cmd := testProxyCommand(t, nil) - cmd.startedCh = make(chan struct{}) - - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - cmd.Run([]string{"-config", configPath}) - wg.Done() - }() - - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - client, err := api.NewClient(api.DefaultConfig()) - if err != nil { - t.Fatal(err) - } - client.SetToken(serverClient.Token()) - client.SetMaxRetries(0) - err = client.SetAddress("http://" + listenAddr) - if err != nil { - t.Fatal(err) - } - - // First try on listener 1 where the API should be disabled. - resp, err := client.RawRequest(client.NewRequest(http.MethodPost, "/proxy/v1/quit")) - if err == nil { - t.Fatalf("expected error") - } - if resp != nil && resp.StatusCode != http.StatusNotFound { - t.Fatalf("expected %d but got: %d", http.StatusNotFound, resp.StatusCode) - } - - // Now try on listener 2 where the quit API should be enabled. - err = client.SetAddress("http://" + listenAddr2) - if err != nil { - t.Fatal(err) - } - - _, err = client.RawRequest(client.NewRequest(http.MethodPost, "/proxy/v1/quit")) - if err != nil { - t.Fatalf("unexpected error: %s", err) - } - - select { - case <-cmd.ShutdownCh: - case <-time.After(5 * time.Second): - t.Errorf("timeout") - } - - wg.Wait() -} - -// TestProxy_LogFile_CliOverridesConfig tests that the CLI values -// override the config for log files -func TestProxy_LogFile_CliOverridesConfig(t *testing.T) { - // Create basic config - configFile := populateTempFile(t, "proxy-config.hcl", BasicHclConfig) - cfg, err := proxyConfig.LoadConfigFile(configFile.Name()) - if err != nil { - t.Fatal("Cannot load config to test update/merge", err) - } - - // Sanity check that the config value is the current value - assert.Equal(t, "TMPDIR/juan.log", cfg.LogFile) - - // Initialize the command and parse any flags - cmd := &ProxyCommand{BaseCommand: &BaseCommand{}} - f := cmd.Flags() - // Simulate the flag being specified - err = f.Parse([]string{"-log-file=/foo/bar/test.log"}) - if err != nil { - t.Fatal(err) - } - - // Update the config based on the inputs. 
- cmd.applyConfigOverrides(f, cfg) - - assert.NotEqual(t, "TMPDIR/juan.log", cfg.LogFile) - assert.NotEqual(t, "/squiggle/logs.txt", cfg.LogFile) - assert.Equal(t, "/foo/bar/test.log", cfg.LogFile) -} - -// TestProxy_LogFile_Config tests log file config when loaded from config -func TestProxy_LogFile_Config(t *testing.T) { - configFile := populateTempFile(t, "proxy-config.hcl", BasicHclConfig) - - cfg, err := proxyConfig.LoadConfigFile(configFile.Name()) - if err != nil { - t.Fatal("Cannot load config to test update/merge", err) - } - - // Sanity check that the config value is the current value - assert.Equal(t, "TMPDIR/juan.log", cfg.LogFile, "sanity check on log config failed") - assert.Equal(t, 2, cfg.LogRotateMaxFiles) - assert.Equal(t, 1048576, cfg.LogRotateBytes) - - // Parse the cli flags (but we pass in an empty slice) - cmd := &ProxyCommand{BaseCommand: &BaseCommand{}} - f := cmd.Flags() - err = f.Parse([]string{}) - if err != nil { - t.Fatal(err) - } - - // Should change nothing... - cmd.applyConfigOverrides(f, cfg) - - assert.Equal(t, "TMPDIR/juan.log", cfg.LogFile, "actual config check") - assert.Equal(t, 2, cfg.LogRotateMaxFiles) - assert.Equal(t, 1048576, cfg.LogRotateBytes) -} - -// TestProxy_Config_NewLogger_Default Tests defaults for log level and -// specifically cmd.newLogger() -func TestProxy_Config_NewLogger_Default(t *testing.T) { - cmd := &ProxyCommand{BaseCommand: &BaseCommand{}} - cmd.config = proxyConfig.NewConfig() - logger, err := cmd.newLogger() - - assert.NoError(t, err) - assert.NotNil(t, logger) - assert.Equal(t, hclog.Info.String(), logger.GetLevel().String()) -} - -// TestProxy_Config_ReloadLogLevel Tests reloading updates the log -// level as expected. -func TestProxy_Config_ReloadLogLevel(t *testing.T) { - cmd := &ProxyCommand{BaseCommand: &BaseCommand{}} - var err error - tempDir := t.TempDir() - - // Load an initial config - hcl := strings.ReplaceAll(BasicHclConfig, "TMPDIR", tempDir) - configFile := populateTempFile(t, "proxy-config.hcl", hcl) - cmd.config, err = proxyConfig.LoadConfigFile(configFile.Name()) - if err != nil { - t.Fatal("Cannot load config to test update/merge", err) - } - - // Tweak the loaded config to make sure we can put log files into a temp dir - // and systemd log attempts work fine, this would usually happen during Run. - cmd.logWriter = os.Stdout - cmd.logger, err = cmd.newLogger() - if err != nil { - t.Fatal("logger required for systemd log messages", err) - } - - // Sanity check - assert.Equal(t, "warn", cmd.config.LogLevel) - - // Load a new config - hcl = strings.ReplaceAll(BasicHclConfig2, "TMPDIR", tempDir) - configFile = populateTempFile(t, "proxy-config.hcl", hcl) - err = cmd.reloadConfig([]string{configFile.Name()}) - assert.NoError(t, err) - assert.Equal(t, "debug", cmd.config.LogLevel) -} - -// TestProxy_Config_ReloadTls Tests that the TLS certs for the listener are -// correctly reloaded. 
-func TestProxy_Config_ReloadTls(t *testing.T) { - var wg sync.WaitGroup - wd, err := os.Getwd() - if err != nil { - t.Fatal("unable to get current working directory") - } - workingDir := filepath.Join(wd, "/proxy/test-fixtures/reload") - fooCert := "reload_foo.pem" - fooKey := "reload_foo.key" - - barCert := "reload_bar.pem" - barKey := "reload_bar.key" - - reloadCert := "reload_cert.pem" - reloadKey := "reload_key.pem" - caPem := "reload_ca.pem" - - tempDir := t.TempDir() - - // Set up initial 'foo' certs - inBytes, err := os.ReadFile(filepath.Join(workingDir, fooCert)) - if err != nil { - t.Fatal("unable to read cert required for test", fooCert, err) - } - err = os.WriteFile(filepath.Join(tempDir, reloadCert), inBytes, 0o777) - if err != nil { - t.Fatal("unable to write temp cert required for test", reloadCert, err) - } - - inBytes, err = os.ReadFile(filepath.Join(workingDir, fooKey)) - if err != nil { - t.Fatal("unable to read cert key required for test", fooKey, err) - } - err = os.WriteFile(filepath.Join(tempDir, reloadKey), inBytes, 0o777) - if err != nil { - t.Fatal("unable to write temp cert key required for test", reloadKey, err) - } - - inBytes, err = os.ReadFile(filepath.Join(workingDir, caPem)) - if err != nil { - t.Fatal("unable to read CA pem required for test", caPem, err) - } - certPool := x509.NewCertPool() - ok := certPool.AppendCertsFromPEM(inBytes) - if !ok { - t.Fatal("not ok when appending CA cert") - } - - replacedHcl := strings.ReplaceAll(BasicHclConfig, "TMPDIR", tempDir) - configFile := populateTempFile(t, "proxy-config.hcl", replacedHcl) - - // Set up Proxy - logger := logging.NewVaultLogger(hclog.Trace) - ui, cmd := testProxyCommand(t, logger) - - wg.Add(1) - args := []string{"-config", configFile.Name()} - go func() { - if code := cmd.Run(args); code != 0 { - output := ui.ErrorWriter.String() + ui.OutputWriter.String() - t.Errorf("got a non-zero exit status: %s", output) - } - wg.Done() - }() - - testCertificateName := func(cn string) error { - conn, err := tls.Dial("tcp", "127.0.0.1:8100", &tls.Config{ - RootCAs: certPool, - }) - if err != nil { - return err - } - defer conn.Close() - if err = conn.Handshake(); err != nil { - return err - } - servName := conn.ConnectionState().PeerCertificates[0].Subject.CommonName - if servName != cn { - return fmt.Errorf("expected %s, got %s", cn, servName) - } - return nil - } - - // Start - select { - case <-cmd.startedCh: - case <-time.After(5 * time.Second): - t.Fatalf("timeout") - } - - if err := testCertificateName("foo.example.com"); err != nil { - t.Fatalf("certificate name didn't check out: %s", err) - } - - // Swap out certs - inBytes, err = os.ReadFile(filepath.Join(workingDir, barCert)) - if err != nil { - t.Fatal("unable to read cert required for test", barCert, err) - } - err = os.WriteFile(filepath.Join(tempDir, reloadCert), inBytes, 0o777) - if err != nil { - t.Fatal("unable to write temp cert required for test", reloadCert, err) - } - - inBytes, err = os.ReadFile(filepath.Join(workingDir, barKey)) - if err != nil { - t.Fatal("unable to read cert key required for test", barKey, err) - } - err = os.WriteFile(filepath.Join(tempDir, reloadKey), inBytes, 0o777) - if err != nil { - t.Fatal("unable to write temp cert key required for test", reloadKey, err) - } - - // Reload - cmd.SighupCh <- struct{}{} - select { - case <-cmd.reloadedCh: - case <-time.After(5 * time.Second): - t.Fatalf("timeout") - } - - if err := testCertificateName("bar.example.com"); err != nil { - t.Fatalf("certificate name didn't check out: 
%s", err) - } - - // Shut down - cmd.ShutdownCh <- struct{}{} - - wg.Wait() -} diff --git a/command/read.go b/command/read.go index 17b85529e12dfb..3487c5d0d79fb9 100644 --- a/command/read.go +++ b/command/read.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/read_test.go b/command/read_test.go index fbe7ab414fa56c..4a8ec877aa0f8c 100644 --- a/command/read_test.go +++ b/command/read_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/rotate.go b/command/rotate.go index 7a174f34eb9c05..f366a6133b1057 100644 --- a/command/rotate.go +++ b/command/rotate.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/rotate_test.go b/command/rotate_test.go index bfd48f7b026cba..37ac3234059085 100644 --- a/command/rotate_test.go +++ b/command/rotate_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets.go b/command/secrets.go index 320167226c20cf..06e63bec281faf 100644 --- a/command/secrets.go +++ b/command/secrets.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_disable.go b/command/secrets_disable.go index 8d782a524577b3..47a61c5fe094cb 100644 --- a/command/secrets_disable.go +++ b/command/secrets_disable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_disable_test.go b/command/secrets_disable_test.go index d7c7da713bd79b..567c8956d6308c 100644 --- a/command/secrets_disable_test.go +++ b/command/secrets_disable_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_enable.go b/command/secrets_enable.go index 39ce3bf1b88024..a7e98dce918023 100644 --- a/command/secrets_enable.go +++ b/command/secrets_enable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_enable_test.go b/command/secrets_enable_test.go index 93984b3c33dda5..127e54a6ac671f 100644 --- a/command/secrets_enable_test.go +++ b/command/secrets_enable_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_list.go b/command/secrets_list.go index 90a8fe8ed973c3..998620f0964a4e 100644 --- a/command/secrets_list.go +++ b/command/secrets_list.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_list_test.go b/command/secrets_list_test.go index 95b60e34071af0..1aeee5bf67294d 100644 --- a/command/secrets_list_test.go +++ b/command/secrets_list_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_move.go b/command/secrets_move.go index b74adcd6af7bd0..458e3bbece7add 100644 --- a/command/secrets_move.go +++ b/command/secrets_move.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_move_test.go b/command/secrets_move_test.go index 3aabaa179bbe3f..153fbeb2cdc0ba 100644 --- a/command/secrets_move_test.go +++ b/command/secrets_move_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_tune.go b/command/secrets_tune.go index 74753e29d333d4..bf8fa3d5937840 100644 --- a/command/secrets_tune.go +++ b/command/secrets_tune.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/secrets_tune_test.go b/command/secrets_tune_test.go index 25b8a7ce791fb8..eabc559f3f1b12 100644 --- a/command/secrets_tune_test.go +++ b/command/secrets_tune_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/server.go b/command/server.go index 73b6659e906a38..31eab7ef0f8884 100644 --- a/command/server.go +++ b/command/server.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( @@ -43,7 +40,6 @@ import ( loghelper "github.com/hashicorp/vault/helper/logging" "github.com/hashicorp/vault/helper/metricsutil" "github.com/hashicorp/vault/helper/namespace" - "github.com/hashicorp/vault/helper/testhelpers/teststorage" "github.com/hashicorp/vault/helper/useragent" vaulthttp "github.com/hashicorp/vault/http" "github.com/hashicorp/vault/internalshared/configutil" @@ -51,7 +47,6 @@ import ( "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/helper/jsonutil" "github.com/hashicorp/vault/sdk/helper/strutil" - "github.com/hashicorp/vault/sdk/helper/testcluster" "github.com/hashicorp/vault/sdk/logical" "github.com/hashicorp/vault/sdk/physical" sr "github.com/hashicorp/vault/serviceregistration" @@ -141,7 +136,6 @@ type ServerCommand struct { flagDevFourCluster bool flagDevTransactional bool flagDevAutoSeal bool - flagDevClusterJson string flagTestVerifyOnly bool flagTestServerConfig bool flagDevConsul bool @@ -373,12 +367,6 @@ func (c *ServerCommand) Flags() *FlagSets { Hidden: true, }) - f.StringVar(&StringVar{ - Name: "dev-cluster-json", - Target: &c.flagDevClusterJson, - Usage: "File to write cluster definition to", - }) - // TODO: should the below flags be public? 
f.BoolVar(&BoolVar{ Name: "test-verify-only", @@ -548,7 +536,9 @@ func (c *ServerCommand) runRecoveryMode() int { info["Seal Type"] = sealType var seal vault.Seal - defaultSeal := vault.NewDefaultSeal(vaultseal.NewAccess(aeadwrapper.NewShamirWrapper())) + defaultSeal := vault.NewDefaultSeal(&vaultseal.Access{ + Wrapper: aeadwrapper.NewShamirWrapper(), + }) sealLogger := c.logger.ResetNamed(fmt.Sprintf("seal.%s", sealType)) wrapper, sealConfigError = configutil.ConfigureWrapper(configSeal, &infoKeys, &info, sealLogger) if sealConfigError != nil { @@ -561,7 +551,9 @@ func (c *ServerCommand) runRecoveryMode() int { if wrapper == nil { seal = defaultSeal } else { - seal, err = vault.NewAutoSeal(vaultseal.NewAccess(wrapper)) + seal, err = vault.NewAutoSeal(&vaultseal.Access{ + Wrapper: wrapper, + }) if err != nil { c.UI.Error(fmt.Sprintf("error creating auto seal: %v", err)) } @@ -930,69 +922,6 @@ func (c *ServerCommand) InitListeners(config *server.Config, disableClustering b return 0, lns, clusterAddrs, nil } -func configureDevTLS(c *ServerCommand) (func(), *server.Config, string, error) { - var devStorageType string - - switch { - case c.flagDevConsul: - devStorageType = "consul" - case c.flagDevHA && c.flagDevTransactional: - devStorageType = "inmem_transactional_ha" - case !c.flagDevHA && c.flagDevTransactional: - devStorageType = "inmem_transactional" - case c.flagDevHA && !c.flagDevTransactional: - devStorageType = "inmem_ha" - default: - devStorageType = "inmem" - } - - var certDir string - var err error - var config *server.Config - var f func() - - if c.flagDevTLS { - if c.flagDevTLSCertDir != "" { - if _, err = os.Stat(c.flagDevTLSCertDir); err != nil { - return nil, nil, "", err - } - - certDir = c.flagDevTLSCertDir - } else { - if certDir, err = os.MkdirTemp("", "vault-tls"); err != nil { - return nil, nil, certDir, err - } - } - config, err = server.DevTLSConfig(devStorageType, certDir) - - f = func() { - if err := os.Remove(fmt.Sprintf("%s/%s", certDir, server.VaultDevCAFilename)); err != nil { - c.UI.Error(err.Error()) - } - - if err := os.Remove(fmt.Sprintf("%s/%s", certDir, server.VaultDevCertFilename)); err != nil { - c.UI.Error(err.Error()) - } - - if err := os.Remove(fmt.Sprintf("%s/%s", certDir, server.VaultDevKeyFilename)); err != nil { - c.UI.Error(err.Error()) - } - - // Only delete temp directories we made. 
- if c.flagDevTLSCertDir == "" { - if err := os.Remove(certDir); err != nil { - c.UI.Error(err.Error()) - } - } - } - - } else { - config, err = server.DevConfig(devStorageType) - } - - return f, config, certDir, err -} - func (c *ServerCommand) Run(args []string) int { f := c.Flags() @@ -1033,11 +962,68 @@ func (c *ServerCommand) Run(args []string) int { // Load the configuration var config *server.Config + var err error var certDir string if c.flagDev { - df, cfg, dir, err := configureDevTLS(c) - if df != nil { - defer df() + var devStorageType string + switch { + case c.flagDevConsul: + devStorageType = "consul" + case c.flagDevHA && c.flagDevTransactional: + devStorageType = "inmem_transactional_ha" + case !c.flagDevHA && c.flagDevTransactional: + devStorageType = "inmem_transactional" + case c.flagDevHA && !c.flagDevTransactional: + devStorageType = "inmem_ha" + default: + devStorageType = "inmem" + } + + if c.flagDevTLS { + if c.flagDevTLSCertDir != "" { + _, err := os.Stat(c.flagDevTLSCertDir) + if err != nil { + c.UI.Error(err.Error()) + return 1 + } + + certDir = c.flagDevTLSCertDir + } else { + certDir, err = os.MkdirTemp("", "vault-tls") + if err != nil { + c.UI.Error(err.Error()) + return 1 + } + } + config, err = server.DevTLSConfig(devStorageType, certDir) + + defer func() { + err := os.Remove(fmt.Sprintf("%s/%s", certDir, server.VaultDevCAFilename)) + if err != nil { + c.UI.Error(err.Error()) + } + + err = os.Remove(fmt.Sprintf("%s/%s", certDir, server.VaultDevCertFilename)) + if err != nil { + c.UI.Error(err.Error()) + } + + err = os.Remove(fmt.Sprintf("%s/%s", certDir, server.VaultDevKeyFilename)) + if err != nil { + c.UI.Error(err.Error()) + } + + // Only delete temp directories we made. + if c.flagDevTLSCertDir == "" { + err = os.Remove(certDir) + if err != nil { + c.UI.Error(err.Error()) + } + } + }() + + } else { + config, err = server.DevConfig(devStorageType) } if err != nil { @@ -1045,9 +1031,6 @@ func (c *ServerCommand) Run(args []string) int { return 1 } - config = cfg - certDir = dir - if c.flagDevListenAddr != "" { config.Listeners[0].Address = c.flagDevListenAddr } @@ -1131,6 +1114,15 @@ func (c *ServerCommand) Run(args []string) int { if envLicense := os.Getenv(EnvVaultLicense); envLicense != "" { config.License = envLicense } + if disableSSC := os.Getenv(DisableSSCTokens); disableSSC != "" { + var err error + config.DisableSSCTokens, err = strconv.ParseBool(disableSSC) + if err != nil { + c.UI.Warn(wrapAtLength("WARNING! 
failed to parse " + + "VAULT_DISABLE_SERVER_SIDE_CONSISTENT_TOKENS env var: " + + "setting to default value false")) + } + } if err := server.ExperimentsFromEnvAndCLI(config, EnvVaultExperiments, c.flagExperiments); err != nil { c.UI.Error(err.Error()) @@ -1164,18 +1156,16 @@ func (c *ServerCommand) Run(args []string) int { metricsHelper := metricsutil.NewMetricsHelper(inmemMetrics, prometheusEnabled) // Initialize the storage backend - var backend physical.Backend - if !c.flagDev || config.Storage != nil { - backend, err = c.setupStorage(config) - if err != nil { - c.UI.Error(err.Error()) - return 1 - } - // Prevent server startup if migration is active - // TODO: Use OpenTelemetry to integrate this into Diagnose - if c.storageMigrationActive(backend) { - return 1 - } + backend, err := c.setupStorage(config) + if err != nil { + c.UI.Error(err.Error()) + return 1 + } + + // Prevent server startup if migration is active + // TODO: Use OpenTelemetry to integrate this into Diagnose + if c.storageMigrationActive(backend) { + return 1 } // Initialize the Service Discovery, if there is one @@ -1484,8 +1474,7 @@ func (c *ServerCommand) Run(args []string) int { } // If we're in Dev mode, then initialize the core - clusterJson := &testcluster.ClusterJson{} - err = initDevCore(c, &coreConfig, config, core, certDir, clusterJson) + err = initDevCore(c, &coreConfig, config, core, certDir) if err != nil { c.UI.Error(err.Error()) return 1 @@ -1545,34 +1534,6 @@ func (c *ServerCommand) Run(args []string) int { // Notify systemd that the server is ready (if applicable) c.notifySystemd(systemd.SdNotifyReady) - if c.flagDev { - protocol := "http://" - if c.flagDevTLS { - protocol = "https://" - } - clusterJson.Nodes = []testcluster.ClusterNode{ - { - APIAddress: protocol + config.Listeners[0].Address, - }, - } - if c.flagDevTLS { - clusterJson.CACertPath = fmt.Sprintf("%s/%s", certDir, server.VaultDevCAFilename) - } - - if c.flagDevClusterJson != "" && !c.flagDevThreeNode { - b, err := jsonutil.EncodeJSON(clusterJson) - if err != nil { - c.UI.Error(fmt.Sprintf("Error encoding cluster.json: %s", err)) - return 1 - } - err = os.WriteFile(c.flagDevClusterJson, b, 0o600) - if err != nil { - c.UI.Error(fmt.Sprintf("Error writing cluster.json %q: %s", c.flagDevClusterJson, err)) - return 1 - } - } - } - defer func() { if err := c.removePidFile(config.PidFile); err != nil { c.UI.Error(fmt.Sprintf("Error deleting the PID file: %s", err)) @@ -1670,9 +1631,6 @@ func (c *ServerCommand) Run(args []string) int { c.UI.Error(err.Error()) } - if err := core.ReloadCensus(); err != nil { - c.UI.Error(err.Error()) - } select { case c.licenseReloadedCh <- err: default: @@ -2003,33 +1961,14 @@ func (c *ServerCommand) enableDev(core *vault.Core, coreConfig *vault.CoreConfig } func (c *ServerCommand) enableThreeNodeDevCluster(base *vault.CoreConfig, info map[string]string, infoKeys []string, devListenAddress, tempDir string) int { - conf, opts := teststorage.ClusterSetup(base, &vault.TestClusterOptions{ + testCluster := vault.NewTestCluster(&testing.RuntimeT{}, base, &vault.TestClusterOptions{ HandlerFunc: vaulthttp.Handler, BaseListenAddress: c.flagDevListenAddr, Logger: c.logger, TempDir: tempDir, - DefaultHandlerProperties: vault.HandlerProperties{ - ListenerConfig: &configutil.Listener{ - Profiling: configutil.ListenerProfiling{ - UnauthenticatedPProfAccess: true, - }, - Telemetry: configutil.ListenerTelemetry{ - UnauthenticatedMetricsAccess: true, - }, - }, - }, - }, nil) - testCluster := 
vault.NewTestCluster(&testing.RuntimeT{}, conf, opts) + }) defer c.cleanupGuard.Do(testCluster.Cleanup) - if constants.IsEnterprise { - err := testcluster.WaitForActiveNodeAndPerfStandbys(context.Background(), testCluster) - if err != nil { - c.UI.Error(fmt.Sprintf("perf standbys didn't become ready: %v", err)) - return 1 - } - } - info["cluster parameters path"] = testCluster.TempDir infoKeys = append(infoKeys, "cluster parameters path") @@ -2170,29 +2109,6 @@ func (c *ServerCommand) enableThreeNodeDevCluster(base *vault.CoreConfig, info m testCluster.TempDir, )) - if c.flagDevClusterJson != "" { - clusterJson := testcluster.ClusterJson{ - Nodes: []testcluster.ClusterNode{}, - CACertPath: filepath.Join(testCluster.TempDir, "ca_cert.pem"), - RootToken: testCluster.RootToken, - } - for _, core := range testCluster.Cores { - clusterJson.Nodes = append(clusterJson.Nodes, testcluster.ClusterNode{ - APIAddress: core.Client.Address(), - }) - } - b, err := jsonutil.EncodeJSON(clusterJson) - if err != nil { - c.UI.Error(fmt.Sprintf("Error encoding cluster.json: %s", err)) - return 1 - } - err = os.WriteFile(c.flagDevClusterJson, b, 0o600) - if err != nil { - c.UI.Error(fmt.Sprintf("Error writing cluster.json %q: %s", c.flagDevClusterJson, err)) - return 1 - } - } - // Output the header that the server has started c.UI.Output("==> Vault server started! Log data will stream in below:\n") @@ -2489,8 +2405,7 @@ func setSeal(c *ServerCommand, config *server.Config, infoKeys []string, info ma var barrierWrapper wrapping.Wrapper if c.flagDevAutoSeal { var err error - access, _ := vaultseal.NewTestSeal(nil) - barrierSeal, err = vault.NewAutoSeal(access) + barrierSeal, err = vault.NewAutoSeal(vaultseal.NewTestSeal(nil)) if err != nil { return nil, nil, nil, nil, nil, err } @@ -2521,7 +2436,9 @@ func setSeal(c *ServerCommand, config *server.Config, infoKeys []string, info ma var seal vault.Seal sealLogger := c.logger.ResetNamed(fmt.Sprintf("seal.%s", sealType)) c.allLoggers = append(c.allLoggers, sealLogger) - defaultSeal := vault.NewDefaultSeal(vaultseal.NewAccess(aeadwrapper.NewShamirWrapper())) + defaultSeal := vault.NewDefaultSeal(&vaultseal.Access{ + Wrapper: aeadwrapper.NewShamirWrapper(), + }) var sealInfoKeys []string sealInfoMap := map[string]string{} wrapper, sealConfigError = configutil.ConfigureWrapper(configSeal, &sealInfoKeys, &sealInfoMap, sealLogger) @@ -2535,7 +2452,9 @@ func setSeal(c *ServerCommand, config *server.Config, infoKeys []string, info ma seal = defaultSeal } else { var err error - seal, err = vault.NewAutoSeal(vaultseal.NewAccess(wrapper)) + seal, err = vault.NewAutoSeal(&vaultseal.Access{ + Wrapper: wrapper, + }) if err != nil { return nil, nil, nil, nil, nil, err } @@ -2823,7 +2742,7 @@ func runListeners(c *ServerCommand, coreConfig *vault.CoreConfig, config *server return nil } -func initDevCore(c *ServerCommand, coreConfig *vault.CoreConfig, config *server.Config, core *vault.Core, certDir string, clusterJSON *testcluster.ClusterJson) error { +func initDevCore(c *ServerCommand, coreConfig *vault.CoreConfig, config *server.Config, core *vault.Core, certDir string) error { if c.flagDev && !c.flagDevSkipInit { init, err := c.enableDev(core, coreConfig) @@ -2831,10 +2750,6 @@ func initDevCore(c *ServerCommand, coreConfig *vault.CoreConfig, config *server. 
return fmt.Errorf("Error initializing Dev mode: %s", err) } - if clusterJSON != nil { - clusterJSON.RootToken = init.RootToken - } - var plugins, pluginsNotLoaded []string if c.flagDevPluginDir != "" && c.flagDevPluginInit { diff --git a/command/server/config.go b/command/server/config.go index 34e4848004b724..a9637f22477fba 100644 --- a/command/server/config.go +++ b/command/server/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( @@ -24,8 +21,6 @@ import ( "github.com/hashicorp/vault/internalshared/configutil" "github.com/hashicorp/vault/sdk/helper/consts" "github.com/hashicorp/vault/sdk/helper/strutil" - "github.com/hashicorp/vault/sdk/helper/testcluster" - "github.com/mitchellh/mapstructure" ) const ( @@ -1209,12 +1204,3 @@ func (c *Config) found(s, k string) { delete(c.UnusedKeys, s) c.FoundKeys = append(c.FoundKeys, k) } - -func (c *Config) ToVaultNodeConfig() (*testcluster.VaultNodeConfig, error) { - var vnc testcluster.VaultNodeConfig - err := mapstructure.Decode(c, &vnc) - if err != nil { - return nil, err - } - return &vnc, nil -} diff --git a/command/server/config_custom_response_headers_test.go b/command/server/config_custom_response_headers_test.go index 11c4300b4160a4..5380568c251079 100644 --- a/command/server/config_custom_response_headers_test.go +++ b/command/server/config_custom_response_headers_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/config_oss_test.go b/command/server/config_oss_test.go index 4a08ddf78ad03b..f64670e03a556c 100644 --- a/command/server/config_oss_test.go +++ b/command/server/config_oss_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package server diff --git a/command/server/config_telemetry_test.go b/command/server/config_telemetry_test.go index 54245d05151a19..581710f565a7e9 100644 --- a/command/server/config_telemetry_test.go +++ b/command/server/config_telemetry_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/config_test.go b/command/server/config_test.go index b570f1e7662563..ed40f2667640d2 100644 --- a/command/server/config_test.go +++ b/command/server/config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/config_test_helpers.go b/command/server/config_test_helpers.go index f5136449cef499..94535b43829a5b 100644 --- a/command/server/config_test_helpers.go +++ b/command/server/config_test_helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package server import ( @@ -503,8 +500,8 @@ func testUnknownFieldValidation(t *testing.T) { Problem: "unknown or unsupported field bad_value found in configuration", Position: token.Pos{ Filename: "./test-fixtures/config.hcl", - Offset: 651, - Line: 37, + Offset: 583, + Line: 34, Column: 5, }, }, @@ -851,9 +848,6 @@ listener "tcp" { agent_api { enable_quit = true } - proxy_api { - enable_quit = true - } }`)) config := Config{ @@ -894,9 +888,6 @@ listener "tcp" { AgentAPI: &configutil.AgentAPI{ EnableQuit: true, }, - ProxyAPI: &configutil.ProxyAPI{ - EnableQuit: true, - }, CustomResponseHeaders: DefaultCustomHeaders, }, }, @@ -1106,7 +1097,6 @@ func testParseSeals(t *testing.T) { }, }, } - addExpectedDefaultEntConfig(expected) config.Prune() require.Equal(t, config, expected) } diff --git a/command/server/config_test_helpers_util.go b/command/server/config_test_helpers_util.go index 7f8c02e1c6a035..63fa3cfe6a80e3 100644 --- a/command/server/config_test_helpers_util.go +++ b/command/server/config_test_helpers_util.go @@ -1,10 +1,6 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package server func addExpectedEntConfig(c *Config, sentinelModules []string) {} -func addExpectedDefaultEntConfig(c *Config) {} func addExpectedEntSanitizedConfig(c map[string]interface{}, sentinelModules []string) {} diff --git a/command/server/config_util.go b/command/server/config_util.go index 3570b9a59bb27a..feefc6092aac33 100644 --- a/command/server/config_util.go +++ b/command/server/config_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package server diff --git a/command/server/hcp_link_config_test.go b/command/server/hcp_link_config_test.go index 121b855138c342..51f5a5ec880059 100644 --- a/command/server/hcp_link_config_test.go +++ b/command/server/hcp_link_config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/listener.go b/command/server/listener.go index 19b89565ab5c96..78acbd3e7b92c6 100644 --- a/command/server/listener.go +++ b/command/server/listener.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/listener_tcp.go b/command/server/listener_tcp.go index 29b601c4bfaad0..dbba4b40e88cc3 100644 --- a/command/server/listener_tcp.go +++ b/command/server/listener_tcp.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/listener_tcp_test.go b/command/server/listener_tcp_test.go index 6d73cf2cb32ddd..5ebf6111413ee0 100644 --- a/command/server/listener_tcp_test.go +++ b/command/server/listener_tcp_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/listener_test.go b/command/server/listener_test.go index f4d555c5c2ca27..b1bf62ddfedfde 100644 --- a/command/server/listener_test.go +++ b/command/server/listener_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/listener_unix.go b/command/server/listener_unix.go index d5ea772eb476ae..3740b58f7c6761 100644 --- a/command/server/listener_unix.go +++ b/command/server/listener_unix.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/listener_unix_test.go b/command/server/listener_unix_test.go index 91eaf121ccde84..e5254a294795cb 100644 --- a/command/server/listener_unix_test.go +++ b/command/server/listener_unix_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server/server_seal_transit_acc_test.go b/command/server/server_seal_transit_acc_test.go index 074893fcfb88c3..7f357b7f2a9621 100644 --- a/command/server/server_seal_transit_acc_test.go +++ b/command/server/server_seal_transit_acc_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( @@ -14,8 +11,8 @@ import ( "github.com/hashicorp/go-uuid" "github.com/hashicorp/vault/api" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/internalshared/configutil" - "github.com/hashicorp/vault/sdk/helper/docker" ) func TestTransitWrapper_Lifecycle(t *testing.T) { diff --git a/command/server/test-fixtures/config-dir/baz.hcl b/command/server/test-fixtures/config-dir/baz.hcl index 171a07dd7a99d3..47146c717c172f 100644 --- a/command/server/test-fixtures/config-dir/baz.hcl +++ b/command/server/test-fixtures/config-dir/baz.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - telemetry { statsd_address = "baz" statsite_address = "qux" diff --git a/command/server/test-fixtures/config-dir/foo.hcl b/command/server/test-fixtures/config-dir/foo.hcl index 0ef439a8037ea4..f538ede1ba4af5 100644 --- a/command/server/test-fixtures/config-dir/foo.hcl +++ b/command/server/test-fixtures/config-dir/foo.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config.hcl b/command/server/test-fixtures/config.hcl index 2a53289bf4c598..38ad4752416918 100644 --- a/command/server/test-fixtures/config.hcl +++ b/command/server/test-fixtures/config.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config2.hcl b/command/server/test-fixtures/config2.hcl index 4d9cdf7cba1a1d..7b1dbfd56faa81 100644 --- a/command/server/test-fixtures/config2.hcl +++ b/command/server/test-fixtures/config2.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config3.hcl b/command/server/test-fixtures/config3.hcl index 96b93318ffdadf..1023284e09e51b 100644 --- a/command/server/test-fixtures/config3.hcl +++ b/command/server/test-fixtures/config3.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true log_requests_level = "Basic" diff --git a/command/server/test-fixtures/config4.hcl b/command/server/test-fixtures/config4.hcl index be49453da03fad..b620f3c7e75f67 100644 --- a/command/server/test-fixtures/config4.hcl +++ b/command/server/test-fixtures/config4.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true ui = true diff --git a/command/server/test-fixtures/config5.hcl b/command/server/test-fixtures/config5.hcl index 3f5b2460822f2d..3b3c64c7e0af9e 100644 --- a/command/server/test-fixtures/config5.hcl +++ b/command/server/test-fixtures/config5.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config_bad_https_storage.hcl b/command/server/test-fixtures/config_bad_https_storage.hcl index b53673c353955a..f8b5d7734be2ea 100644 --- a/command/server/test-fixtures/config_bad_https_storage.hcl +++ b/command/server/test-fixtures/config_bad_https_storage.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config_custom_response_headers_1.hcl b/command/server/test-fixtures/config_custom_response_headers_1.hcl index bc458da53a2521..c2f868c2f146c9 100644 --- a/command/server/test-fixtures/config_custom_response_headers_1.hcl +++ b/command/server/test-fixtures/config_custom_response_headers_1.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - storage "inmem" {} listener "tcp" { address = "127.0.0.1:8200" diff --git a/command/server/test-fixtures/config_custom_response_headers_multiple_listeners.hcl b/command/server/test-fixtures/config_custom_response_headers_multiple_listeners.hcl index 9ae5b408d647fa..11aa099232f916 100644 --- a/command/server/test-fixtures/config_custom_response_headers_multiple_listeners.hcl +++ b/command/server/test-fixtures/config_custom_response_headers_multiple_listeners.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - storage "inmem" {} listener "tcp" { address = "127.0.0.1:8200" diff --git a/command/server/test-fixtures/config_diagnose_hastorage_bad_https.hcl b/command/server/test-fixtures/config_diagnose_hastorage_bad_https.hcl index 9e1ac5e10bd6c8..deded2ddf17086 100644 --- a/command/server/test-fixtures/config_diagnose_hastorage_bad_https.hcl +++ b/command/server/test-fixtures/config_diagnose_hastorage_bad_https.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config_diagnose_ok.hcl b/command/server/test-fixtures/config_diagnose_ok.hcl index 0b903ee6e0ee9d..a3f70540bf4d3f 100644 --- a/command/server/test-fixtures/config_diagnose_ok.hcl +++ b/command/server/test-fixtures/config_diagnose_ok.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config_raft.hcl b/command/server/test-fixtures/config_raft.hcl index 7cb585b39b5cb9..c23a434744ba5a 100644 --- a/command/server/test-fixtures/config_raft.hcl +++ b/command/server/test-fixtures/config_raft.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/config_seals.hcl b/command/server/test-fixtures/config_seals.hcl index 6fdd13381843d9..7917dc1b79c381 100644 --- a/command/server/test-fixtures/config_seals.hcl +++ b/command/server/test-fixtures/config_seals.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - listener "tcp" { address = "127.0.0.1:443" } diff --git a/command/server/test-fixtures/config_small.hcl b/command/server/test-fixtures/config_small.hcl index a8e3c7a605c634..cfbc28db8fdde8 100644 --- a/command/server/test-fixtures/config_small.hcl +++ b/command/server/test-fixtures/config_small.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - storage "raft" { path = "/path/to/raft" node_id = "raft_node_1" diff --git a/command/server/test-fixtures/diagnose_bad_https_consul_sr.hcl b/command/server/test-fixtures/diagnose_bad_https_consul_sr.hcl index bc5a71e13a5866..6faecaab73fbf4 100644 --- a/command/server/test-fixtures/diagnose_bad_https_consul_sr.hcl +++ b/command/server/test-fixtures/diagnose_bad_https_consul_sr.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/diagnose_bad_telemetry1.hcl b/command/server/test-fixtures/diagnose_bad_telemetry1.hcl index a634b162ebc50e..f7629bdd02d26d 100644 --- a/command/server/test-fixtures/diagnose_bad_telemetry1.hcl +++ b/command/server/test-fixtures/diagnose_bad_telemetry1.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true ui = true diff --git a/command/server/test-fixtures/diagnose_bad_telemetry2.hcl b/command/server/test-fixtures/diagnose_bad_telemetry2.hcl index afb195d5538366..5c967e3ef926d8 100644 --- a/command/server/test-fixtures/diagnose_bad_telemetry2.hcl +++ b/command/server/test-fixtures/diagnose_bad_telemetry2.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true ui = true diff --git a/command/server/test-fixtures/diagnose_bad_telemetry3.hcl b/command/server/test-fixtures/diagnose_bad_telemetry3.hcl index 422351febc3328..f9669258460b3c 100644 --- a/command/server/test-fixtures/diagnose_bad_telemetry3.hcl +++ b/command/server/test-fixtures/diagnose_bad_telemetry3.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true ui = true diff --git a/command/server/test-fixtures/diagnose_ok_storage_direct_access.hcl b/command/server/test-fixtures/diagnose_ok_storage_direct_access.hcl index cda9e2a2da93e4..3b6a9abf290ab8 100644 --- a/command/server/test-fixtures/diagnose_ok_storage_direct_access.hcl +++ b/command/server/test-fixtures/diagnose_ok_storage_direct_access.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/diagnose_raft_no_bolt_folder.hcl b/command/server/test-fixtures/diagnose_raft_no_bolt_folder.hcl index e28c1bcb1ae40f..d92186f2642d84 100644 --- a/command/server/test-fixtures/diagnose_raft_no_bolt_folder.hcl +++ b/command/server/test-fixtures/diagnose_raft_no_bolt_folder.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - storage "raft" { path = "/path/to/raft/data" node_id = "raft_node_1" diff --git a/command/server/test-fixtures/diagnose_seal_transit_tls_check.hcl b/command/server/test-fixtures/diagnose_seal_transit_tls_check.hcl index 0c0edcfce22b39..a51c27b37a86fa 100644 --- a/command/server/test-fixtures/diagnose_seal_transit_tls_check.hcl +++ b/command/server/test-fixtures/diagnose_seal_transit_tls_check.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/hcp_link_config.hcl b/command/server/test-fixtures/hcp_link_config.hcl index 1a909e3b48a41c..fc25b760e77c8c 100644 --- a/command/server/test-fixtures/hcp_link_config.hcl +++ b/command/server/test-fixtures/hcp_link_config.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - storage "inmem" {} listener "tcp" { address = "127.0.0.1:8200" diff --git a/command/server/test-fixtures/nostore_config.hcl b/command/server/test-fixtures/nostore_config.hcl index a80e385b3d6cfa..667570cb0c1c07 100644 --- a/command/server/test-fixtures/nostore_config.hcl +++ b/command/server/test-fixtures/nostore_config.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/raft_retry_join.hcl b/command/server/test-fixtures/raft_retry_join.hcl index 6f7fe9e4771c0b..a4f1f3df0139bb 100644 --- a/command/server/test-fixtures/raft_retry_join.hcl +++ b/command/server/test-fixtures/raft_retry_join.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - storage "raft" { path = "/storage/path/raft" node_id = "raft1" diff --git a/command/server/test-fixtures/telemetry/filter_default_override.hcl b/command/server/test-fixtures/telemetry/filter_default_override.hcl index 4fc70e9ebf09f0..04e55f646cb12c 100644 --- a/command/server/test-fixtures/telemetry/filter_default_override.hcl +++ b/command/server/test-fixtures/telemetry/filter_default_override.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_mlock = true ui = true diff --git a/command/server/test-fixtures/telemetry/valid_prefix_filter.hcl b/command/server/test-fixtures/telemetry/valid_prefix_filter.hcl index 055f12259e4666..814dd1c825f9e6 100644 --- a/command/server/test-fixtures/telemetry/valid_prefix_filter.hcl +++ b/command/server/test-fixtures/telemetry/valid_prefix_filter.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_mlock = true ui = true diff --git a/command/server/test-fixtures/tls_config_ok.hcl b/command/server/test-fixtures/tls_config_ok.hcl index 4cbd4fa4644c93..0dee4b4836039b 100644 --- a/command/server/test-fixtures/tls_config_ok.hcl +++ b/command/server/test-fixtures/tls_config_ok.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - disable_cache = true disable_mlock = true diff --git a/command/server/test-fixtures/unauth_in_flight_access.hcl b/command/server/test-fixtures/unauth_in_flight_access.hcl index c191f139b35b79..eda6641276f1dc 100644 --- a/command/server/test-fixtures/unauth_in_flight_access.hcl +++ b/command/server/test-fixtures/unauth_in_flight_access.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - storage "inmem" {} listener "tcp" { address = "127.0.0.1:8200" diff --git a/command/server/tls_util.go b/command/server/tls_util.go index a038c2ae629053..34f6a72f61ea6d 100644 --- a/command/server/tls_util.go +++ b/command/server/tls_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package server import ( diff --git a/command/server_noprofile.go b/command/server_noprofile.go index 1cf516a1aaf3d6..edaebeb13c05ca 100644 --- a/command/server_noprofile.go +++ b/command/server_noprofile.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !memprofiler package command diff --git a/command/server_profile.go b/command/server_profile.go index 42f07d2e8519f6..1ce87a6841532b 100644 --- a/command/server_profile.go +++ b/command/server_profile.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build memprofiler package command diff --git a/command/server_test.go b/command/server_test.go index bfe5b14ddebfe2..24dda4a76dafec 100644 --- a/command/server_test.go +++ b/command/server_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !race && !hsm && !fips_140_3 // NOTE: we can't use this with HSM. We can't set testing mode on and it's not @@ -330,66 +327,3 @@ func TestServer_DevTLS(t *testing.T) { require.Equal(t, 0, retCode, output) require.Contains(t, output, `tls: "enabled"`) } - -// TestConfigureDevTLS verifies the various logic paths that flow through the -// configureDevTLS function. -func TestConfigureDevTLS(t *testing.T) { - testcases := []struct { - ServerCommand *ServerCommand - DeferFuncNotNil bool - ConfigNotNil bool - TLSDisable bool - CertPathEmpty bool - ErrNotNil bool - TestDescription string - }{ - { - ServerCommand: &ServerCommand{ - flagDevTLS: false, - }, - ConfigNotNil: true, - TLSDisable: true, - CertPathEmpty: true, - ErrNotNil: false, - TestDescription: "flagDev is false, nothing will be configured", - }, - { - ServerCommand: &ServerCommand{ - flagDevTLS: true, - flagDevTLSCertDir: "", - }, - DeferFuncNotNil: true, - ConfigNotNil: true, - ErrNotNil: false, - TestDescription: "flagDevTLSCertDir is empty", - }, - { - ServerCommand: &ServerCommand{ - flagDevTLS: true, - flagDevTLSCertDir: "@/#", - }, - CertPathEmpty: true, - ErrNotNil: true, - TestDescription: "flagDevTLSCertDir is set to something invalid", - }, - } - - for _, testcase := range testcases { - fun, cfg, certPath, err := configureDevTLS(testcase.ServerCommand) - if fun != nil { - // If a function is returned, call it right away to clean up - // files created in the temporary directory before anything else has - // a chance to fail this test. 
- fun() - } - - require.Equal(t, testcase.DeferFuncNotNil, (fun != nil), "test description %s", testcase.TestDescription) - require.Equal(t, testcase.ConfigNotNil, cfg != nil, "test description %s", testcase.TestDescription) - if testcase.ConfigNotNil { - require.True(t, len(cfg.Listeners) > 0, "test description %s", testcase.TestDescription) - require.Equal(t, testcase.TLSDisable, cfg.Listeners[0].TLSDisable, "test description %s", testcase.TestDescription) - } - require.Equal(t, testcase.CertPathEmpty, len(certPath) == 0, "test description %s", testcase.TestDescription) - require.Equal(t, testcase.ErrNotNil, (err != nil), "test description %s", testcase.TestDescription) - } -} diff --git a/command/server_util.go b/command/server_util.go index 7bf3196fc8d579..d5d9c8f4f3341f 100644 --- a/command/server_util.go +++ b/command/server_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/ssh.go b/command/ssh.go index 90bb03080118c7..5ac2a3dd1484cc 100644 --- a/command/ssh.go +++ b/command/ssh.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/ssh_test.go b/command/ssh_test.go index 137e541a8681fe..3d2c8e0c886efa 100644 --- a/command/ssh_test.go +++ b/command/ssh_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/status.go b/command/status.go index 0b7c6197425672..770adfcf3d48e6 100644 --- a/command/status.go +++ b/command/status.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/status_test.go b/command/status_test.go index 5731945ae78f23..e34a72c578d103 100644 --- a/command/status_test.go +++ b/command/status_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/test-fixtures/config.hcl b/command/test-fixtures/config.hcl index 164acd29cc8025..31de773909c9a6 100644 --- a/command/test-fixtures/config.hcl +++ b/command/test-fixtures/config.hcl @@ -1,4 +1 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - token_helper = "foo" diff --git a/command/test-fixtures/policy.hcl b/command/test-fixtures/policy.hcl index 267fc5cecdc591..7d46bdeabe16f1 100644 --- a/command/test-fixtures/policy.hcl +++ b/command/test-fixtures/policy.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - path "secret/foo" { policy = "write" } diff --git a/command/token.go b/command/token.go index 7b15275283bc14..20af230a5b308e 100644 --- a/command/token.go +++ b/command/token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token/helper.go b/command/token/helper.go index c8ce76326a7b73..ff559e40d447d4 100644 --- a/command/token/helper.go +++ b/command/token/helper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token // TokenHelper is an interface that contains basic operations that must be diff --git a/command/token/helper_external.go b/command/token/helper_external.go index 12557a4b306090..83f5f8907291d0 100644 --- a/command/token/helper_external.go +++ b/command/token/helper_external.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/command/token/helper_external_test.go b/command/token/helper_external_test.go index d95c8890eaf349..b49dd93343ccdb 100644 --- a/command/token/helper_external_test.go +++ b/command/token/helper_external_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/command/token/helper_internal.go b/command/token/helper_internal.go index aeb4faa9bef39b..c5f35721ee9e2f 100644 --- a/command/token/helper_internal.go +++ b/command/token/helper_internal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/command/token/helper_internal_test.go b/command/token/helper_internal_test.go index e68359c8200865..18f3abae56f6cc 100644 --- a/command/token/helper_internal_test.go +++ b/command/token/helper_internal_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/command/token/helper_testing.go b/command/token/helper_testing.go index e95ff35580479c..93465931b78903 100644 --- a/command/token/helper_testing.go +++ b/command/token/helper_testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/command/token/testing.go b/command/token/testing.go index 85da0840c84ac7..725f1276a052aa 100644 --- a/command/token/testing.go +++ b/command/token/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package token import ( diff --git a/command/token_capabilities.go b/command/token_capabilities.go index f7e359c06f13f1..093765630d664c 100644 --- a/command/token_capabilities.go +++ b/command/token_capabilities.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_capabilities_test.go b/command/token_capabilities_test.go index 14b0f276583414..874db49129af40 100644 --- a/command/token_capabilities_test.go +++ b/command/token_capabilities_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_create.go b/command/token_create.go index 1efee5ebff0629..a8dc2f03ea7a36 100644 --- a/command/token_create.go +++ b/command/token_create.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_create_test.go b/command/token_create_test.go index 31bbd244b2e6be..a7e767926ae6a2 100644 --- a/command/token_create_test.go +++ b/command/token_create_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_lookup.go b/command/token_lookup.go index 24161399a2c9bd..55284a29d1bc6b 100644 --- a/command/token_lookup.go +++ b/command/token_lookup.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_lookup_test.go b/command/token_lookup_test.go index 3f5636527661ee..e027b3f7c97bf0 100644 --- a/command/token_lookup_test.go +++ b/command/token_lookup_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_renew.go b/command/token_renew.go index 7a61487b5a25e4..88d6fa20fb2c2e 100644 --- a/command/token_renew.go +++ b/command/token_renew.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_renew_test.go b/command/token_renew_test.go index 29d9292018cf44..c958d4d55af671 100644 --- a/command/token_renew_test.go +++ b/command/token_renew_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_revoke.go b/command/token_revoke.go index 48ccc27ac261b0..f6eb72101bb374 100644 --- a/command/token_revoke.go +++ b/command/token_revoke.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/token_revoke_test.go b/command/token_revoke_test.go index 6ff8898301a18a..7f66e9d4a0d88e 100644 --- a/command/token_revoke_test.go +++ b/command/token_revoke_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/transit.go b/command/transit.go index a48fef7852638a..5278bc84f9315a 100644 --- a/command/transit.go +++ b/command/transit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/transit_import_key.go b/command/transit_import_key.go index 3eea70093bc1e0..922a2f487d6907 100644 --- a/command/transit_import_key.go +++ b/command/transit_import_key.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/transit_import_key_test.go b/command/transit_import_key_test.go index e01c03fa00bb64..d13c032048ff76 100644 --- a/command/transit_import_key_test.go +++ b/command/transit_import_key_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/transit_import_key_version.go b/command/transit_import_key_version.go index 1a25078923bb85..2593b02a016a67 100644 --- a/command/transit_import_key_version.go +++ b/command/transit_import_key_version.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/unwrap.go b/command/unwrap.go index 1f920e7806eb31..53ff0787de2803 100644 --- a/command/unwrap.go +++ b/command/unwrap.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/unwrap_test.go b/command/unwrap_test.go index 608edff51778e6..4a06418b027ce3 100644 --- a/command/unwrap_test.go +++ b/command/unwrap_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/util.go b/command/util.go index e24d65d7259f13..8c0215250a7f3b 100644 --- a/command/util.go +++ b/command/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/version.go b/command/version.go index e9b17227b0bfe3..5e5503ba8b9f4a 100644 --- a/command/version.go +++ b/command/version.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/version_history.go b/command/version_history.go index b2dfbae425d9ae..56d3deb9635479 100644 --- a/command/version_history.go +++ b/command/version_history.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/version_history_test.go b/command/version_history_test.go index c011a4bf4d7b7f..69bd5678886830 100644 --- a/command/version_history_test.go +++ b/command/version_history_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/version_test.go b/command/version_test.go index ede21e62a9cf79..0f59e9ffcb6cd9 100644 --- a/command/version_test.go +++ b/command/version_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/write.go b/command/write.go index 2cc93b8a35049c..3daa2bae60ff48 100644 --- a/command/write.go +++ b/command/write.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/command/write_test.go b/command/write_test.go index 9b76d391c8540c..03aab4c79af2cc 100644 --- a/command/write_test.go +++ b/command/write_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package command import ( diff --git a/enos/ci/aws-nuke.yml b/enos/ci/aws-nuke.yml index 50a567704925e3..6089d0d30573d2 100644 --- a/enos/ci/aws-nuke.yml +++ b/enos/ci/aws-nuke.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - regions: - eu-north-1 - ap-south-1 diff --git a/enos/ci/bootstrap/main.tf b/enos/ci/bootstrap/main.tf index c5ce812d90b245..804f1e66bfa4e2 100644 --- a/enos/ci/bootstrap/main.tf +++ b/enos/ci/bootstrap/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { aws = { diff --git a/enos/ci/bootstrap/outputs.tf b/enos/ci/bootstrap/outputs.tf index e6ff37270bd54f..858318e4cd5cff 100644 --- a/enos/ci/bootstrap/outputs.tf +++ b/enos/ci/bootstrap/outputs.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - output "keys" { value = { "us-east-1" = { diff --git a/enos/ci/bootstrap/variables.tf b/enos/ci/bootstrap/variables.tf index 3fb53bc5177754..3aab3449f368dc 100644 --- a/enos/ci/bootstrap/variables.tf +++ b/enos/ci/bootstrap/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "aws_ssh_public_key" { description = "The public key to use for the ssh key" type = string diff --git a/enos/ci/service-user-iam/main.tf b/enos/ci/service-user-iam/main.tf index c70dbaa13be503..bea2d46a43096e 100644 --- a/enos/ci/service-user-iam/main.tf +++ b/enos/ci/service-user-iam/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { aws = { diff --git a/enos/ci/service-user-iam/outputs.tf b/enos/ci/service-user-iam/outputs.tf index ba980d59d076b2..d4ba89910df9e7 100644 --- a/enos/ci/service-user-iam/outputs.tf +++ b/enos/ci/service-user-iam/outputs.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - output "ci_role" { value = { name = aws_iam_role.role.name diff --git a/enos/ci/service-user-iam/providers.tf b/enos/ci/service-user-iam/providers.tf index 7baba3344006bb..09c86d7bae4e24 100644 --- a/enos/ci/service-user-iam/providers.tf +++ b/enos/ci/service-user-iam/providers.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - provider "aws" { region = "us-east-1" alias = "us_east_1" diff --git a/enos/ci/service-user-iam/service-quotas.tf b/enos/ci/service-user-iam/service-quotas.tf index 3044d41778c423..544f311504e7d0 100644 --- a/enos/ci/service-user-iam/service-quotas.tf +++ b/enos/ci/service-user-iam/service-quotas.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - locals { // This is the code of the service quota to request a change for. Each adjustable limit has a // unique code. See, https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/servicequotas_service_quota#quota_code @@ -9,35 +6,35 @@ locals { } resource "aws_servicequotas_service_quota" "vpcs_per_region_us_east_1" { - provider = aws.us_east_1 + provider = aws.us_east_2 quota_code = local.subnets_per_vpcs_quota service_code = "vpc" - value = 100 + value = 50 } resource "aws_servicequotas_service_quota" "vpcs_per_region_us_east_2" { provider = aws.us_east_2 quota_code = local.subnets_per_vpcs_quota service_code = "vpc" - value = 100 + value = 50 } resource "aws_servicequotas_service_quota" "vpcs_per_region_us_west_1" { provider = aws.us_west_1 quota_code = local.subnets_per_vpcs_quota service_code = "vpc" - value = 100 + value = 50 } resource "aws_servicequotas_service_quota" "vpcs_per_region_us_west_2" { provider = aws.us_west_2 quota_code = local.subnets_per_vpcs_quota service_code = "vpc" - value = 100 + value = 50 } resource "aws_servicequotas_service_quota" "spot_requests_per_region_us_east_1" { - provider = aws.us_east_1 + provider = aws.us_east_2 quota_code = local.standard_spot_instance_requests_quota service_code = "ec2" value = 640 diff --git a/enos/ci/service-user-iam/variables.tf b/enos/ci/service-user-iam/variables.tf index a80d83ca98c338..6cc7efd6bd9bc3 100644 --- a/enos/ci/service-user-iam/variables.tf +++ b/enos/ci/service-user-iam/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "repository" { description = "The GitHub repository, either vault or vault-enterprise" type = string diff --git a/enos/enos-modules.hcl b/enos/enos-modules.hcl index 0886b46a061b63..19116dd51ca099 100644 --- a/enos/enos-modules.hcl +++ b/enos/enos-modules.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - module "autopilot_upgrade_storageconfig" { source = "./modules/autopilot_upgrade_storageconfig" } diff --git a/enos/enos-providers.hcl b/enos/enos-providers.hcl index f277c57e29f03b..9301b55037d064 100644 --- a/enos/enos-providers.hcl +++ b/enos/enos-providers.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - provider "aws" "default" { region = var.aws_region } diff --git a/enos/enos-scenario-agent.hcl b/enos/enos-scenario-agent.hcl index dd10112ceacb5c..d7b4cc3994f6cf 100644 --- a/enos/enos-scenario-agent.hcl +++ b/enos/enos-scenario-agent.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - scenario "agent" { matrix { arch = ["amd64", "arm64"] diff --git a/enos/enos-scenario-autopilot.hcl b/enos/enos-scenario-autopilot.hcl index 234451872acba7..4e86348edf67b2 100644 --- a/enos/enos-scenario-autopilot.hcl +++ b/enos/enos-scenario-autopilot.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - scenario "autopilot" { matrix { arch = ["amd64", "arm64"] diff --git a/enos/enos-scenario-replication.hcl b/enos/enos-scenario-replication.hcl index aebfd849b9ee46..4324c01ce2ceb9 100644 --- a/enos/enos-scenario-replication.hcl +++ b/enos/enos-scenario-replication.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - // The replication scenario configures performance replication between two Vault clusters and verifies // known_primary_cluster_addrs are updated on secondary Vault cluster with the IP addresses of replaced // nodes on primary Vault cluster diff --git a/enos/enos-scenario-smoke.hcl b/enos/enos-scenario-smoke.hcl index 3e5c313f02e634..0d2ebed5f581fc 100644 --- a/enos/enos-scenario-smoke.hcl +++ b/enos/enos-scenario-smoke.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - scenario "smoke" { matrix { arch = ["amd64", "arm64"] diff --git a/enos/enos-scenario-ui.hcl b/enos/enos-scenario-ui.hcl index 9cd734a751a52d..619cd73efc2149 100644 --- a/enos/enos-scenario-ui.hcl +++ b/enos/enos-scenario-ui.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - scenario "ui" { matrix { edition = ["oss", "ent"] diff --git a/enos/enos-scenario-upgrade.hcl b/enos/enos-scenario-upgrade.hcl index adc97dc584567e..e6e719e800a14f 100644 --- a/enos/enos-scenario-upgrade.hcl +++ b/enos/enos-scenario-upgrade.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - scenario "upgrade" { matrix { arch = ["amd64", "arm64"] diff --git a/enos/enos-terraform.hcl b/enos/enos-terraform.hcl index 4048f5e181c2aa..e45ccaa888f286 100644 --- a/enos/enos-terraform.hcl +++ b/enos/enos-terraform.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform_cli "default" { plugin_cache_dir = var.terraform_plugin_cache_dir != null ? abspath(var.terraform_plugin_cache_dir) : null diff --git a/enos/enos-variables.hcl b/enos/enos-variables.hcl index ef445d0a2b09ae..1d0aea6ebd6b3d 100644 --- a/enos/enos-variables.hcl +++ b/enos/enos-variables.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "artifact_path" { type = string description = "The local path for dev artifact to test" diff --git a/enos/enos.vars.hcl b/enos/enos.vars.hcl index d63af4452c8069..d157c7b92650ec 100644 --- a/enos/enos.vars.hcl +++ b/enos/enos.vars.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # aws_region is the AWS region where we'll create infrastructure # for the smoke scenario # aws_region = "us-west-1" diff --git a/enos/k8s/enos-modules-k8s.hcl b/enos/k8s/enos-modules-k8s.hcl index 76518403b750cb..75bfd7856461d9 100644 --- a/enos/k8s/enos-modules-k8s.hcl +++ b/enos/k8s/enos-modules-k8s.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - module "create_kind_cluster" { source = "../modules/local_kind_cluster" } diff --git a/enos/k8s/enos-providers-k8s.hcl b/enos/k8s/enos-providers-k8s.hcl index 7e3d7a77431571..2a0cf2be34841a 100644 --- a/enos/k8s/enos-providers-k8s.hcl +++ b/enos/k8s/enos-providers-k8s.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - provider "enos" "default" {} provider "helm" "default" { diff --git a/enos/k8s/enos-scenario-k8s.hcl b/enos/k8s/enos-scenario-k8s.hcl index 8953d5c37e596e..cc4769dd4f6457 100644 --- a/enos/k8s/enos-scenario-k8s.hcl +++ b/enos/k8s/enos-scenario-k8s.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - scenario "k8s" { matrix { edition = ["oss", "ent"] diff --git a/enos/k8s/enos-terraform-k8s.hcl b/enos/k8s/enos-terraform-k8s.hcl index 5b68bcada61e80..a389153d24f25d 100644 --- a/enos/k8s/enos-terraform-k8s.hcl +++ b/enos/k8s/enos-terraform-k8s.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform "k8s" { required_version = ">= 1.2.0" diff --git a/enos/k8s/enos-variables-k8s.hcl b/enos/k8s/enos-variables-k8s.hcl index 86bf9d5e3642bb..fb8983f2184290 100644 --- a/enos/k8s/enos-variables-k8s.hcl +++ b/enos/k8s/enos-variables-k8s.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_image_repository" { description = "The repository for the docker image to load, i.e. hashicorp/vault" type = string diff --git a/enos/modules/autopilot_upgrade_storageconfig/main.tf b/enos/modules/autopilot_upgrade_storageconfig/main.tf index 68f47d19dd716f..6093b8b1066de6 100644 --- a/enos/modules/autopilot_upgrade_storageconfig/main.tf +++ b/enos/modules/autopilot_upgrade_storageconfig/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_product_version" {} output "storage_addl_config" { diff --git a/enos/modules/az_finder/main.tf b/enos/modules/az_finder/main.tf index 3508ff0cc39c71..b55975578c61cd 100644 --- a/enos/modules/az_finder/main.tf +++ b/enos/modules/az_finder/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { aws = { diff --git a/enos/modules/backend_raft/main.tf b/enos/modules/backend_raft/main.tf index 2e6afa215e9eba..4cb8e58a592bfb 100644 --- a/enos/modules/backend_raft/main.tf +++ b/enos/modules/backend_raft/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - // Shim module to handle the fact that Vault doesn't actually need a backend module terraform { required_providers { diff --git a/enos/modules/build_crt/main.tf b/enos/modules/build_crt/main.tf index 1e125763a16d5a..cffa44b17a0012 100644 --- a/enos/modules/build_crt/main.tf +++ b/enos/modules/build_crt/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Shim module since CRT provided things will use the crt_bundle_path variable variable "bundle_path" { default = "/tmp/vault.zip" diff --git a/enos/modules/build_local/main.tf b/enos/modules/build_local/main.tf index a55850076ed3cb..11f29585eca777 100644 --- a/enos/modules/build_local/main.tf +++ b/enos/modules/build_local/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/build_local/scripts/build.sh b/enos/modules/build_local/scripts/build.sh index 64f1bbd8d7d6e2..385a3af5570409 100755 --- a/enos/modules/build_local/scripts/build.sh +++ b/enos/modules/build_local/scripts/build.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -eux -o pipefail # Install yarn so we can build the UI diff --git a/enos/modules/generate_secondary_token/main.tf b/enos/modules/generate_secondary_token/main.tf index 49a4a15e24c0e7..fbba304bd7cb41 100644 --- a/enos/modules/generate_secondary_token/main.tf +++ b/enos/modules/generate_secondary_token/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/get_local_metadata/main.tf b/enos/modules/get_local_metadata/main.tf index 1c5dc84be9cbc2..fd33673470efed 100644 --- a/enos/modules/get_local_metadata/main.tf +++ b/enos/modules/get_local_metadata/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/get_local_metadata/scripts/build_date.sh b/enos/modules/get_local_metadata/scripts/build_date.sh index fc763fd4e91a96..917888eb1cbfa0 100755 --- a/enos/modules/get_local_metadata/scripts/build_date.sh +++ b/enos/modules/get_local_metadata/scripts/build_date.sh @@ -1,7 +1,4 @@ #!/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -eu -o pipefail pushd "$(git rev-parse --show-toplevel)" > /dev/null diff --git a/enos/modules/get_local_metadata/scripts/version.sh b/enos/modules/get_local_metadata/scripts/version.sh index 74a561f11bd8ed..6921d772ea467c 100755 --- a/enos/modules/get_local_metadata/scripts/version.sh +++ b/enos/modules/get_local_metadata/scripts/version.sh @@ -1,7 +1,4 @@ #!/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -eu -o pipefail pushd "$(git rev-parse --show-toplevel)" > /dev/null diff --git a/enos/modules/k8s_deploy_vault/main.tf b/enos/modules/k8s_deploy_vault/main.tf index 72f4f4700354f7..1878ebeb725968 100644 --- a/enos/modules/k8s_deploy_vault/main.tf +++ b/enos/modules/k8s_deploy_vault/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_version = ">= 1.0" diff --git a/enos/modules/k8s_deploy_vault/variables.tf b/enos/modules/k8s_deploy_vault/variables.tf index 55fa6f1da6b9ab..e9867d7fa0fb7d 100644 --- a/enos/modules/k8s_deploy_vault/variables.tf +++ b/enos/modules/k8s_deploy_vault/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "context_name" { type = string description = "The name of the k8s context for Vault" diff --git a/enos/modules/k8s_vault_verify_build_date/main.tf b/enos/modules/k8s_vault_verify_build_date/main.tf index 366497d08475c6..38f17fbc9e7978 100644 --- a/enos/modules/k8s_vault_verify_build_date/main.tf +++ b/enos/modules/k8s_vault_verify_build_date/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/k8s_vault_verify_build_date/variables.tf b/enos/modules/k8s_vault_verify_build_date/variables.tf index d960b7840e3e4a..7bba75ba68d825 100644 --- a/enos/modules/k8s_vault_verify_build_date/variables.tf +++ b/enos/modules/k8s_vault_verify_build_date/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_instance_count" { type = number description = "How many vault instances are in the cluster" diff --git a/enos/modules/k8s_vault_verify_replication/main.tf b/enos/modules/k8s_vault_verify_replication/main.tf index 27824dc7676b62..804b934591f1cb 100644 --- a/enos/modules/k8s_vault_verify_replication/main.tf +++ b/enos/modules/k8s_vault_verify_replication/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/k8s_vault_verify_replication/scripts/smoke-verify-replication.sh b/enos/modules/k8s_vault_verify_replication/scripts/smoke-verify-replication.sh index 5786502cbcdd2a..363ce7185845bc 100755 --- a/enos/modules/k8s_vault_verify_replication/scripts/smoke-verify-replication.sh +++ b/enos/modules/k8s_vault_verify_replication/scripts/smoke-verify-replication.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # The Vault replication smoke test, documented in # https://docs.google.com/document/d/16sjIk3hzFDPyY5A9ncxTZV_9gnpYSF1_Vx6UA1iiwgI/edit#heading=h.kgrxf0f1et25 diff --git a/enos/modules/k8s_vault_verify_replication/variables.tf b/enos/modules/k8s_vault_verify_replication/variables.tf index 7d4337801defad..42ab38aa572bbe 100644 --- a/enos/modules/k8s_vault_verify_replication/variables.tf +++ b/enos/modules/k8s_vault_verify_replication/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_instance_count" { type = number description = "How many vault instances are in the cluster" diff --git a/enos/modules/k8s_vault_verify_ui/main.tf b/enos/modules/k8s_vault_verify_ui/main.tf index ce5796096771fa..faccb7085870d0 100644 --- a/enos/modules/k8s_vault_verify_ui/main.tf +++ b/enos/modules/k8s_vault_verify_ui/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/k8s_vault_verify_ui/scripts/smoke-verify-ui.sh b/enos/modules/k8s_vault_verify_ui/scripts/smoke-verify-ui.sh index f6b8a278dce722..b85d4da124737f 100755 --- a/enos/modules/k8s_vault_verify_ui/scripts/smoke-verify-ui.sh +++ b/enos/modules/k8s_vault_verify_ui/scripts/smoke-verify-ui.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/k8s_vault_verify_ui/variables.tf b/enos/modules/k8s_vault_verify_ui/variables.tf index c39f24e1fa2e55..6c06d5de792ec3 100644 --- a/enos/modules/k8s_vault_verify_ui/variables.tf +++ b/enos/modules/k8s_vault_verify_ui/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - variable "vault_instance_count" { type = number description = "How many vault instances are in the cluster" diff --git a/enos/modules/k8s_vault_verify_version/main.tf b/enos/modules/k8s_vault_verify_version/main.tf index 8decaaa778489d..693abf97910f27 100644 --- a/enos/modules/k8s_vault_verify_version/main.tf +++ b/enos/modules/k8s_vault_verify_version/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/k8s_vault_verify_version/scripts/get-status.sh b/enos/modules/k8s_vault_verify_version/scripts/get-status.sh index a799ebc66f3fbb..3d2d1fe97506a6 100755 --- a/enos/modules/k8s_vault_verify_version/scripts/get-status.sh +++ b/enos/modules/k8s_vault_verify_version/scripts/get-status.sh @@ -1,7 +1,4 @@ #!/usr/bin/env sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/k8s_vault_verify_version/scripts/smoke-verify-version.sh b/enos/modules/k8s_vault_verify_version/scripts/smoke-verify-version.sh index 514969cf509855..d5c439a9b264ca 100755 --- a/enos/modules/k8s_vault_verify_version/scripts/smoke-verify-version.sh +++ b/enos/modules/k8s_vault_verify_version/scripts/smoke-verify-version.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # The Vault smoke test to verify the Vault version installed diff --git a/enos/modules/k8s_vault_verify_version/variables.tf b/enos/modules/k8s_vault_verify_version/variables.tf index 58940a8551bdac..ed487831a3c06c 100644 --- a/enos/modules/k8s_vault_verify_version/variables.tf +++ b/enos/modules/k8s_vault_verify_version/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_instance_count" { type = number description = "How many vault instances are in the cluster" diff --git a/enos/modules/k8s_vault_verify_write_data/main.tf b/enos/modules/k8s_vault_verify_write_data/main.tf index 5606b89883521c..01caeaba4c1589 100644 --- a/enos/modules/k8s_vault_verify_write_data/main.tf +++ b/enos/modules/k8s_vault_verify_write_data/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/k8s_vault_verify_write_data/variables.tf b/enos/modules/k8s_vault_verify_write_data/variables.tf index d960b7840e3e4a..7bba75ba68d825 100644 --- a/enos/modules/k8s_vault_verify_write_data/variables.tf +++ b/enos/modules/k8s_vault_verify_write_data/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_instance_count" { type = number description = "How many vault instances are in the cluster" diff --git a/enos/modules/load_docker_image/main.tf b/enos/modules/load_docker_image/main.tf index 4e5f293f90843c..854c52f9ea690b 100644 --- a/enos/modules/load_docker_image/main.tf +++ b/enos/modules/load_docker_image/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/local_kind_cluster/main.tf b/enos/modules/local_kind_cluster/main.tf index 5a351679b2e250..c5da14daf770af 100644 --- a/enos/modules/local_kind_cluster/main.tf +++ b/enos/modules/local_kind_cluster/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/read_license/main.tf b/enos/modules/read_license/main.tf index a1358b3e293d99..1b645272abe7cd 100644 --- a/enos/modules/read_license/main.tf +++ b/enos/modules/read_license/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "file_name" {} output "license" { diff --git a/enos/modules/shutdown_multiple_nodes/main.tf b/enos/modules/shutdown_multiple_nodes/main.tf index 86045db0bc255c..df0d1fb8760016 100644 --- a/enos/modules/shutdown_multiple_nodes/main.tf +++ b/enos/modules/shutdown_multiple_nodes/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/shutdown_node/main.tf b/enos/modules/shutdown_node/main.tf index f27de68534e843..0ab4617e163009 100644 --- a/enos/modules/shutdown_node/main.tf +++ b/enos/modules/shutdown_node/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_agent/main.tf b/enos/modules/vault_agent/main.tf index cb112020b39030..001a53278a27a8 100644 --- a/enos/modules/vault_agent/main.tf +++ b/enos/modules/vault_agent/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { aws = { diff --git a/enos/modules/vault_agent/templates/set-up-approle-and-agent.sh b/enos/modules/vault_agent/templates/set-up-approle-and-agent.sh index 42a0976416423e..5444508de30366 100644 --- a/enos/modules/vault_agent/templates/set-up-approle-and-agent.sh +++ b/enos/modules/vault_agent/templates/set-up-approle-and-agent.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_artifactory_artifact/locals.tf b/enos/modules/vault_artifactory_artifact/locals.tf index 708813faa4541f..e022a62c3966ec 100644 --- a/enos/modules/vault_artifactory_artifact/locals.tf +++ b/enos/modules/vault_artifactory_artifact/locals.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - locals { // file name extensions for the install packages of vault for the various architectures, distributions and editions diff --git a/enos/modules/vault_artifactory_artifact/main.tf b/enos/modules/vault_artifactory_artifact/main.tf index 0f0df3865c7ad9..ebc517030ba1e1 100644 --- a/enos/modules/vault_artifactory_artifact/main.tf +++ b/enos/modules/vault_artifactory_artifact/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_artifactory_artifact/outputs.tf b/enos/modules/vault_artifactory_artifact/outputs.tf index c100c45ddd9794..827b2e77341581 100644 --- a/enos/modules/vault_artifactory_artifact/outputs.tf +++ b/enos/modules/vault_artifactory_artifact/outputs.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - output "url" { value = data.enos_artifactory_item.vault.results[0].url diff --git a/enos/modules/vault_artifactory_artifact/variables.tf b/enos/modules/vault_artifactory_artifact/variables.tf index 7b641ce98f9574..778354e7deeab3 100644 --- a/enos/modules/vault_artifactory_artifact/variables.tf +++ b/enos/modules/vault_artifactory_artifact/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - variable "artifactory_username" { type = string diff --git a/enos/modules/vault_get_cluster_ips/main.tf b/enos/modules/vault_get_cluster_ips/main.tf index 25a8902108e138..a288798e60d7d8 100644 --- a/enos/modules/vault_get_cluster_ips/main.tf +++ b/enos/modules/vault_get_cluster_ips/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_get_cluster_ips/scripts/get-leader-private-ip.sh b/enos/modules/vault_get_cluster_ips/scripts/get-leader-private-ip.sh index 98b2d21fdda6c0..360e9d79d6a84f 100644 --- a/enos/modules/vault_get_cluster_ips/scripts/get-leader-private-ip.sh +++ b/enos/modules/vault_get_cluster_ips/scripts/get-leader-private-ip.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_raft_remove_peer/main.tf b/enos/modules/vault_raft_remove_peer/main.tf index a0da72249e1fdc..f1d33d9c0337b4 100644 --- a/enos/modules/vault_raft_remove_peer/main.tf +++ b/enos/modules/vault_raft_remove_peer/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_raft_remove_peer/templates/raft-remove-peer.sh b/enos/modules/vault_raft_remove_peer/templates/raft-remove-peer.sh index ab49f76ba82044..6d967ee30d5e6f 100644 --- a/enos/modules/vault_raft_remove_peer/templates/raft-remove-peer.sh +++ b/enos/modules/vault_raft_remove_peer/templates/raft-remove-peer.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_setup_perf_primary/main.tf b/enos/modules/vault_setup_perf_primary/main.tf index 268a5323ccf1d4..85f5cc910c764e 100644 --- a/enos/modules/vault_setup_perf_primary/main.tf +++ b/enos/modules/vault_setup_perf_primary/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_setup_perf_primary/scripts/configure-vault-pr-primary.sh b/enos/modules/vault_setup_perf_primary/scripts/configure-vault-pr-primary.sh index d15699c4b9667f..679729f3f5a3db 100644 --- a/enos/modules/vault_setup_perf_primary/scripts/configure-vault-pr-primary.sh +++ b/enos/modules/vault_setup_perf_primary/scripts/configure-vault-pr-primary.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_setup_perf_secondary/main.tf b/enos/modules/vault_setup_perf_secondary/main.tf index 3be3ca8897f7f4..1fac54b94d2c66 100644 --- a/enos/modules/vault_setup_perf_secondary/main.tf +++ b/enos/modules/vault_setup_perf_secondary/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_test_ui/main.tf b/enos/modules/vault_test_ui/main.tf index 0e051bdf48cff7..2af426232e46b9 100644 --- a/enos/modules/vault_test_ui/main.tf +++ b/enos/modules/vault_test_ui/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_test_ui/outputs.tf b/enos/modules/vault_test_ui/outputs.tf index 887d030a7899dd..abe4924cebe7df 100644 --- a/enos/modules/vault_test_ui/outputs.tf +++ b/enos/modules/vault_test_ui/outputs.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - output "ui_test_stderr" { value = var.ui_run_tests ? enos_local_exec.test_ui[0].stderr : "No std out tests where not run" } diff --git a/enos/modules/vault_test_ui/scripts/test_ui.sh b/enos/modules/vault_test_ui/scripts/test_ui.sh index e7cf7e9564ed99..f84cb929f878d3 100755 --- a/enos/modules/vault_test_ui/scripts/test_ui.sh +++ b/enos/modules/vault_test_ui/scripts/test_ui.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -eux -o pipefail diff --git a/enos/modules/vault_test_ui/variables.tf b/enos/modules/vault_test_ui/variables.tf index c2db5c57b9bba4..807cf01aea7faa 100644 --- a/enos/modules/vault_test_ui/variables.tf +++ b/enos/modules/vault_test_ui/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_addr" { description = "The host address for the vault instance to test" type = string diff --git a/enos/modules/vault_unseal_nodes/main.tf b/enos/modules/vault_unseal_nodes/main.tf index b8b86b3ba99006..2bd1c6c0fbf38d 100644 --- a/enos/modules/vault_unseal_nodes/main.tf +++ b/enos/modules/vault_unseal_nodes/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # This module unseals the replication secondary follower nodes terraform { required_providers { diff --git a/enos/modules/vault_unseal_nodes/scripts/unseal-node.sh b/enos/modules/vault_unseal_nodes/scripts/unseal-node.sh index b3f77de5041cfd..6fe00a93de295f 100755 --- a/enos/modules/vault_unseal_nodes/scripts/unseal-node.sh +++ b/enos/modules/vault_unseal_nodes/scripts/unseal-node.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - binpath=${VAULT_INSTALL_DIR}/vault diff --git a/enos/modules/vault_unseal_nodes/scripts/wait-until-sealed.sh b/enos/modules/vault_unseal_nodes/scripts/wait-until-sealed.sh index af935578781ac9..d0ebb1f067caf3 100644 --- a/enos/modules/vault_unseal_nodes/scripts/wait-until-sealed.sh +++ b/enos/modules/vault_unseal_nodes/scripts/wait-until-sealed.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - binpath=${VAULT_INSTALL_DIR}/vault diff --git a/enos/modules/vault_upgrade/main.tf b/enos/modules/vault_upgrade/main.tf index 5502212d515189..07e65bf197f961 100644 --- a/enos/modules/vault_upgrade/main.tf +++ b/enos/modules/vault_upgrade/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { aws = { diff --git a/enos/modules/vault_upgrade/templates/get-follower-public-ips.sh b/enos/modules/vault_upgrade/templates/get-follower-public-ips.sh index 127be64499cf3a..e424aa44406ceb 100644 --- a/enos/modules/vault_upgrade/templates/get-follower-public-ips.sh +++ b/enos/modules/vault_upgrade/templates/get-follower-public-ips.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_upgrade/templates/get-leader-public-ip.sh b/enos/modules/vault_upgrade/templates/get-leader-public-ip.sh index d64a6c16ed366a..5c36dae336f5c8 100644 --- a/enos/modules/vault_upgrade/templates/get-leader-public-ip.sh +++ b/enos/modules/vault_upgrade/templates/get-leader-public-ip.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_upgrade/templates/restart-vault.sh b/enos/modules/vault_upgrade/templates/restart-vault.sh index fc6b007a350974..aa685364305602 100644 --- a/enos/modules/vault_upgrade/templates/restart-vault.sh +++ b/enos/modules/vault_upgrade/templates/restart-vault.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -eux diff --git a/enos/modules/vault_verify_agent_output/main.tf b/enos/modules/vault_verify_agent_output/main.tf index 850ea5366c1e69..6643c8b6266203 100644 --- a/enos/modules/vault_verify_agent_output/main.tf +++ b/enos/modules/vault_verify_agent_output/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_agent_output/templates/verify-vault-agent-output.sh b/enos/modules/vault_verify_agent_output/templates/verify-vault-agent-output.sh index cd25a01c8d021b..3c434ba9727621 100644 --- a/enos/modules/vault_verify_agent_output/templates/verify-vault-agent-output.sh +++ b/enos/modules/vault_verify_agent_output/templates/verify-vault-agent-output.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_verify_autopilot/main.tf b/enos/modules/vault_verify_autopilot/main.tf index b1d050af295956..ca03ea6f6f29cf 100644 --- a/enos/modules/vault_verify_autopilot/main.tf +++ b/enos/modules/vault_verify_autopilot/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_autopilot/templates/smoke-verify-autopilot.sh b/enos/modules/vault_verify_autopilot/templates/smoke-verify-autopilot.sh index 129288a3cd2122..1dd5d901471001 100755 --- a/enos/modules/vault_verify_autopilot/templates/smoke-verify-autopilot.sh +++ b/enos/modules/vault_verify_autopilot/templates/smoke-verify-autopilot.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - token="${vault_token}" autopilot_version="${vault_autopilot_upgrade_version}" diff --git a/enos/modules/vault_verify_performance_replication/main.tf b/enos/modules/vault_verify_performance_replication/main.tf index 6604f8c126eaf6..a44eec5b0dc6b5 100644 --- a/enos/modules/vault_verify_performance_replication/main.tf +++ b/enos/modules/vault_verify_performance_replication/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_raft_auto_join_voter/main.tf b/enos/modules/vault_verify_raft_auto_join_voter/main.tf index 44df4496ab9e51..ded9c3cc7007ff 100644 --- a/enos/modules/vault_verify_raft_auto_join_voter/main.tf +++ b/enos/modules/vault_verify_raft_auto_join_voter/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_raft_auto_join_voter/templates/verify-raft-auto-join-voter.sh b/enos/modules/vault_verify_raft_auto_join_voter/templates/verify-raft-auto-join-voter.sh index 3187ac69fbf00a..e1172d7158f614 100644 --- a/enos/modules/vault_verify_raft_auto_join_voter/templates/verify-raft-auto-join-voter.sh +++ b/enos/modules/vault_verify_raft_auto_join_voter/templates/verify-raft-auto-join-voter.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_verify_read_data/main.tf b/enos/modules/vault_verify_read_data/main.tf index 8a4d7ea7ace5ff..a104e1d677a066 100644 --- a/enos/modules/vault_verify_read_data/main.tf +++ b/enos/modules/vault_verify_read_data/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_read_data/scripts/verify-data.sh b/enos/modules/vault_verify_read_data/scripts/verify-data.sh index 5c095c58caf8e3..d150d8f7efc41d 100644 --- a/enos/modules/vault_verify_read_data/scripts/verify-data.sh +++ b/enos/modules/vault_verify_read_data/scripts/verify-data.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_verify_replication/main.tf b/enos/modules/vault_verify_replication/main.tf index fbb360a38da4c3..57a97f9ddd1b7d 100644 --- a/enos/modules/vault_verify_replication/main.tf +++ b/enos/modules/vault_verify_replication/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/vault_verify_replication/templates/smoke-verify-replication.sh b/enos/modules/vault_verify_replication/templates/smoke-verify-replication.sh index 1ef6207a37eb8f..d7bc72f23c246b 100644 --- a/enos/modules/vault_verify_replication/templates/smoke-verify-replication.sh +++ b/enos/modules/vault_verify_replication/templates/smoke-verify-replication.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # The Vault replication smoke test, documented in # https://docs.google.com/document/d/16sjIk3hzFDPyY5A9ncxTZV_9gnpYSF1_Vx6UA1iiwgI/edit#heading=h.kgrxf0f1et25 diff --git a/enos/modules/vault_verify_replication/variables.tf b/enos/modules/vault_verify_replication/variables.tf index 26ac75c912916e..b335ee45efcef9 100644 --- a/enos/modules/vault_verify_replication/variables.tf +++ b/enos/modules/vault_verify_replication/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_edition" { type = string diff --git a/enos/modules/vault_verify_ui/main.tf b/enos/modules/vault_verify_ui/main.tf index 32986072cba775..5703326d1a5109 100644 --- a/enos/modules/vault_verify_ui/main.tf +++ b/enos/modules/vault_verify_ui/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { diff --git a/enos/modules/vault_verify_ui/templates/smoke-verify-ui.sh b/enos/modules/vault_verify_ui/templates/smoke-verify-ui.sh index 2ec23a107332b4..bcd7e1cc3055e9 100644 --- a/enos/modules/vault_verify_ui/templates/smoke-verify-ui.sh +++ b/enos/modules/vault_verify_ui/templates/smoke-verify-ui.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_verify_ui/variables.tf b/enos/modules/vault_verify_ui/variables.tf index 344f0d8077d7b7..7eaf5d1bf7f4ad 100644 --- a/enos/modules/vault_verify_ui/variables.tf +++ b/enos/modules/vault_verify_ui/variables.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - variable "vault_install_dir" { type = string diff --git a/enos/modules/vault_verify_undo_logs/main.tf b/enos/modules/vault_verify_undo_logs/main.tf index 717d90735fd075..c856ca4c92c597 100644 --- a/enos/modules/vault_verify_undo_logs/main.tf +++ b/enos/modules/vault_verify_undo_logs/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_undo_logs/scripts/smoke-verify-undo-logs.sh b/enos/modules/vault_verify_undo_logs/scripts/smoke-verify-undo-logs.sh index ec308dd8bbaf0d..5761ea1d74741a 100644 --- a/enos/modules/vault_verify_undo_logs/scripts/smoke-verify-undo-logs.sh +++ b/enos/modules/vault_verify_undo_logs/scripts/smoke-verify-undo-logs.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - function fail() { echo "$1" 1>&2 diff --git a/enos/modules/vault_verify_unsealed/main.tf b/enos/modules/vault_verify_unsealed/main.tf index 45d15418a63276..0b615295cea699 100644 --- a/enos/modules/vault_verify_unsealed/main.tf +++ b/enos/modules/vault_verify_unsealed/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_unsealed/templates/verify-vault-node-unsealed.sh b/enos/modules/vault_verify_unsealed/templates/verify-vault-node-unsealed.sh index c69c253ba4c1aa..aefc75ec117a0b 100644 --- a/enos/modules/vault_verify_unsealed/templates/verify-vault-node-unsealed.sh +++ b/enos/modules/vault_verify_unsealed/templates/verify-vault-node-unsealed.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_verify_version/main.tf b/enos/modules/vault_verify_version/main.tf index 88b4e7a00d42d4..9e80f456c3b5a2 100644 --- a/enos/modules/vault_verify_version/main.tf +++ b/enos/modules/vault_verify_version/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_version/templates/verify-cluster-version.sh b/enos/modules/vault_verify_version/templates/verify-cluster-version.sh index ba5df7488580fb..3fd21023517148 100644 --- a/enos/modules/vault_verify_version/templates/verify-cluster-version.sh +++ b/enos/modules/vault_verify_version/templates/verify-cluster-version.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Verify the Vault "version" includes the correct base version, build date, # revision SHA, and edition metadata. diff --git a/enos/modules/vault_verify_write_data/main.tf b/enos/modules/vault_verify_write_data/main.tf index 2369e51dd40468..afb2cf2bc28ccb 100644 --- a/enos/modules/vault_verify_write_data/main.tf +++ b/enos/modules/vault_verify_write_data/main.tf @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - terraform { required_providers { enos = { diff --git a/enos/modules/vault_verify_write_data/scripts/smoke-enable-secrets-kv.sh b/enos/modules/vault_verify_write_data/scripts/smoke-enable-secrets-kv.sh index 2d3e81c2161a39..0e32060810befb 100644 --- a/enos/modules/vault_verify_write_data/scripts/smoke-enable-secrets-kv.sh +++ b/enos/modules/vault_verify_write_data/scripts/smoke-enable-secrets-kv.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/enos/modules/vault_verify_write_data/scripts/smoke-write-test-data.sh b/enos/modules/vault_verify_write_data/scripts/smoke-write-test-data.sh index 98b6392c580cb0..62b357cc897b93 100644 --- a/enos/modules/vault_verify_write_data/scripts/smoke-write-test-data.sh +++ b/enos/modules/vault_verify_write_data/scripts/smoke-write-test-data.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/go.mod b/go.mod index 04b5cdbbaede2e..4a48c4957c4632 100644 --- a/go.mod +++ b/go.mod @@ -1,16 +1,6 @@ module github.com/hashicorp/vault -// The go version directive value isn't consulted when building our production binaries, -// and the vault module isn't intended to be imported into other projects. As such the -// impact of this setting is usually rather limited. Note however that in some cases the -// Go project introduces new semantics for handling of go.mod depending on the value. -// -// The general policy for updating it is: when the Go major version used on the branch is -// updated. If we choose not to do so at some point (e.g. because we don't want some new -// semantic related to Go module handling), this comment should be updated to explain that. -// -// Whenever this value gets updated, sdk/go.mod should be updated to the same value. 
-go 1.20 +go 1.19 replace github.com/hashicorp/vault/api => ./api @@ -22,26 +12,28 @@ replace github.com/hashicorp/vault/api/auth/userpass => ./api/auth/userpass replace github.com/hashicorp/vault/sdk => ./sdk +replace go.etcd.io/etcd/client/pkg/v3 v3.5.0 => go.etcd.io/etcd/client/pkg/v3 v3.0.0-20210928084031-3df272774672 + require ( - cloud.google.com/go/monitoring v1.13.0 - cloud.google.com/go/spanner v1.45.0 + cloud.google.com/go/monitoring v1.12.0 + cloud.google.com/go/spanner v1.44.0 cloud.google.com/go/storage v1.28.1 - github.com/Azure/azure-storage-blob-go v0.15.0 - github.com/Azure/go-autorest/autorest v0.11.29 - github.com/Azure/go-autorest/autorest/adal v0.9.22 + github.com/Azure/azure-storage-blob-go v0.14.0 + github.com/Azure/go-autorest/autorest v0.11.28 + github.com/Azure/go-autorest/autorest/adal v0.9.20 github.com/NYTimes/gziphandler v1.1.1 github.com/ProtonMail/go-crypto v0.0.0-20220824120805-4b6e5c587895 github.com/SAP/go-hdb v0.14.1 github.com/Sectorbob/mlab-ns2 v0.0.0-20171030222938-d3aa0c295a8a github.com/aerospike/aerospike-client-go/v5 v5.6.0 - github.com/aliyun/alibaba-cloud-sdk-go v1.62.301 + github.com/aliyun/alibaba-cloud-sdk-go v1.62.146 github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190307165228-86c17b95fcd5 github.com/apple/foundationdb/bindings/go v0.0.0-20190411004307-cd5c9d91fad2 github.com/armon/go-metrics v0.4.1 github.com/armon/go-radix v1.0.0 github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef - github.com/aws/aws-sdk-go v1.44.268 - github.com/aws/aws-sdk-go-v2/config v1.18.19 + github.com/aws/aws-sdk-go v1.44.191 + github.com/aws/aws-sdk-go-v2/config v1.6.0 github.com/axiomhq/hyperloglog v0.0.0-20220105174342-98591331716a github.com/cenkalti/backoff/v3 v3.2.2 github.com/chrismalek/oktasdk-go v0.0.0-20181212195951-3430665dfaa0 @@ -49,37 +41,37 @@ require ( github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf github.com/denisenkom/go-mssqldb v0.12.2 + github.com/docker/docker v20.10.18+incompatible + github.com/docker/go-connections v0.4.0 github.com/duosecurity/duo_api_golang v0.0.0-20190308151101-6c680f768e74 github.com/dustin/go-humanize v1.0.0 - github.com/fatih/color v1.15.0 + github.com/fatih/color v1.14.1 github.com/fatih/structs v1.1.0 - github.com/favadi/protoc-go-inject-tag v1.4.0 + github.com/favadi/protoc-go-inject-tag v1.3.0 github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 - github.com/go-errors/errors v1.4.2 - github.com/go-jose/go-jose/v3 v3.0.0 - github.com/go-ldap/ldap/v3 v3.4.4 + github.com/go-errors/errors v1.4.1 + github.com/go-ldap/ldap/v3 v3.4.1 github.com/go-sql-driver/mysql v1.6.0 github.com/go-test/deep v1.1.0 github.com/go-zookeeper/zk v1.0.3 github.com/gocql/gocql v1.0.0 - github.com/golang-jwt/jwt/v4 v4.5.0 - github.com/golang/protobuf v1.5.3 + github.com/golang-jwt/jwt/v4 v4.4.2 + github.com/golang/protobuf v1.5.2 github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 github.com/google/go-cmp v0.5.9 github.com/google/go-github v17.0.0+incompatible github.com/google/go-metrics-stackdriver v0.2.0 - github.com/google/tink/go v1.7.0 - github.com/hashicorp/cap v0.3.1 - github.com/hashicorp/consul-template v0.32.0 - github.com/hashicorp/consul/api v1.20.0 + github.com/google/tink/go v1.6.1 + github.com/hashicorp/cap v0.2.1-0.20230221194157-7894fed1633d + github.com/hashicorp/consul-template v0.29.5 + github.com/hashicorp/consul/api v1.17.0 github.com/hashicorp/errwrap v1.1.0 github.com/hashicorp/eventlogger v0.1.1 
github.com/hashicorp/go-cleanhttp v0.5.2 github.com/hashicorp/go-discover v0.0.0-20210818145131-c573d69da192 github.com/hashicorp/go-gcp-common v0.8.0 github.com/hashicorp/go-hclog v1.5.0 - github.com/hashicorp/go-kms-wrapping/entropy/v2 v2.0.0 - github.com/hashicorp/go-kms-wrapping/v2 v2.0.9 + github.com/hashicorp/go-kms-wrapping/v2 v2.0.8 github.com/hashicorp/go-kms-wrapping/wrappers/aead/v2 v2.0.7-1 github.com/hashicorp/go-kms-wrapping/wrappers/alicloudkms/v2 v2.0.1 github.com/hashicorp/go-kms-wrapping/wrappers/awskms/v2 v2.0.7 @@ -90,16 +82,15 @@ require ( github.com/hashicorp/go-memdb v1.3.3 github.com/hashicorp/go-msgpack v1.1.5 github.com/hashicorp/go-multierror v1.1.1 - github.com/hashicorp/go-plugin v1.4.9 + github.com/hashicorp/go-plugin v1.4.8 github.com/hashicorp/go-raftchunking v0.6.3-0.20191002164813-7e9e8525653a - github.com/hashicorp/go-retryablehttp v0.7.2 + github.com/hashicorp/go-retryablehttp v0.7.1 github.com/hashicorp/go-rootcerts v1.0.2 - github.com/hashicorp/go-secure-stdlib/awsutil v0.2.2 + github.com/hashicorp/go-secure-stdlib/awsutil v0.1.6 github.com/hashicorp/go-secure-stdlib/base62 v0.1.2 github.com/hashicorp/go-secure-stdlib/gatedwriter v0.1.1 github.com/hashicorp/go-secure-stdlib/kv-builder v0.1.2 - github.com/hashicorp/go-secure-stdlib/mlock v0.1.3 - github.com/hashicorp/go-secure-stdlib/nonceutil v0.1.0 + github.com/hashicorp/go-secure-stdlib/mlock v0.1.2 github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 github.com/hashicorp/go-secure-stdlib/password v0.1.1 github.com/hashicorp/go-secure-stdlib/reloadutil v0.1.1 @@ -111,57 +102,56 @@ require ( github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/golang-lru v0.5.4 github.com/hashicorp/hcl v1.0.1-vault-5 - github.com/hashicorp/hcl/v2 v2.16.2 github.com/hashicorp/hcp-link v0.1.0 github.com/hashicorp/hcp-scada-provider v0.2.1 github.com/hashicorp/hcp-sdk-go v0.23.0 - github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf + github.com/hashicorp/nomad/api v0.0.0-20220707195938-75f4c2237b28 github.com/hashicorp/raft v1.3.10 github.com/hashicorp/raft-autopilot v0.2.0 github.com/hashicorp/raft-boltdb/v2 v2.0.0-20210421194847-a7e34179d62c github.com/hashicorp/raft-snapshot v1.0.4 - github.com/hashicorp/vault-plugin-auth-alicloud v0.15.0 - github.com/hashicorp/vault-plugin-auth-azure v0.15.0 - github.com/hashicorp/vault-plugin-auth-centrify v0.15.1 - github.com/hashicorp/vault-plugin-auth-cf v0.15.0 - github.com/hashicorp/vault-plugin-auth-gcp v0.16.0 - github.com/hashicorp/vault-plugin-auth-jwt v0.16.0 - github.com/hashicorp/vault-plugin-auth-kerberos v0.10.0 - github.com/hashicorp/vault-plugin-auth-kubernetes v0.16.0 - github.com/hashicorp/vault-plugin-auth-oci v0.14.0 - github.com/hashicorp/vault-plugin-database-couchbase v0.9.2 - github.com/hashicorp/vault-plugin-database-elasticsearch v0.13.2 - github.com/hashicorp/vault-plugin-database-mongodbatlas v0.10.0 - github.com/hashicorp/vault-plugin-database-redis v0.2.1 - github.com/hashicorp/vault-plugin-database-redis-elasticache v0.2.1 - github.com/hashicorp/vault-plugin-database-snowflake v0.8.0 + github.com/hashicorp/vault-plugin-auth-alicloud v0.14.0 + github.com/hashicorp/vault-plugin-auth-azure v0.13.0 + github.com/hashicorp/vault-plugin-auth-centrify v0.14.0 + github.com/hashicorp/vault-plugin-auth-cf v0.14.0 + github.com/hashicorp/vault-plugin-auth-gcp v0.15.0 + github.com/hashicorp/vault-plugin-auth-jwt v0.15.2 + github.com/hashicorp/vault-plugin-auth-kerberos v0.9.0 + github.com/hashicorp/vault-plugin-auth-kubernetes v0.15.0 + 
github.com/hashicorp/vault-plugin-auth-oci v0.13.1 + github.com/hashicorp/vault-plugin-database-couchbase v0.9.0 + github.com/hashicorp/vault-plugin-database-elasticsearch v0.13.1 + github.com/hashicorp/vault-plugin-database-mongodbatlas v0.9.0 + github.com/hashicorp/vault-plugin-database-redis v0.2.0 + github.com/hashicorp/vault-plugin-database-redis-elasticache v0.2.0 + github.com/hashicorp/vault-plugin-database-snowflake v0.7.0 github.com/hashicorp/vault-plugin-mock v0.16.1 - github.com/hashicorp/vault-plugin-secrets-ad v0.16.0 - github.com/hashicorp/vault-plugin-secrets-alicloud v0.15.0 - github.com/hashicorp/vault-plugin-secrets-azure v0.16.0 - github.com/hashicorp/vault-plugin-secrets-gcp v0.16.0 - github.com/hashicorp/vault-plugin-secrets-gcpkms v0.15.0 - github.com/hashicorp/vault-plugin-secrets-kubernetes v0.5.0 - github.com/hashicorp/vault-plugin-secrets-kv v0.15.0 - github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.10.0 - github.com/hashicorp/vault-plugin-secrets-openldap v0.11.0 - github.com/hashicorp/vault-plugin-secrets-terraform v0.7.1 - github.com/hashicorp/vault-testing-stepwise v0.1.3 - github.com/hashicorp/vault/api v1.9.2 + github.com/hashicorp/vault-plugin-secrets-ad v0.15.0 + github.com/hashicorp/vault-plugin-secrets-alicloud v0.14.1 + github.com/hashicorp/vault-plugin-secrets-azure v0.15.0 + github.com/hashicorp/vault-plugin-secrets-gcp v0.15.0 + github.com/hashicorp/vault-plugin-secrets-gcpkms v0.14.0 + github.com/hashicorp/vault-plugin-secrets-kubernetes v0.3.0 + github.com/hashicorp/vault-plugin-secrets-kv v0.14.2 + github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.9.1 + github.com/hashicorp/vault-plugin-secrets-openldap v0.10.1 + github.com/hashicorp/vault-plugin-secrets-terraform v0.7.0 + github.com/hashicorp/vault-testing-stepwise v0.1.3-0.20230203193428-3a789cb2c68f + github.com/hashicorp/vault/api v1.9.0 github.com/hashicorp/vault/api/auth/approle v0.1.0 github.com/hashicorp/vault/api/auth/userpass v0.1.0 - github.com/hashicorp/vault/sdk v0.9.2-0.20230530190758-08ee474850e0 + github.com/hashicorp/vault/sdk v0.8.1 github.com/hashicorp/vault/vault/hcp_link/proto v0.0.0-20230201201504-b741fa893d77 github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab github.com/jackc/pgx/v4 v4.15.0 - github.com/jcmturner/gokrb5/v8 v8.4.4 + github.com/jcmturner/gokrb5/v8 v8.4.3 github.com/jefferai/isbadcipher v0.0.0-20190226160619-51d2077c035f github.com/jefferai/jsonx v1.0.0 github.com/joyent/triton-go v1.7.1-0.20200416154420-6801d15b779f - github.com/kr/pretty v0.3.1 + github.com/kr/pretty v0.3.0 github.com/kr/text v0.2.0 github.com/mattn/go-colorable v0.1.13 - github.com/mattn/go-isatty v0.0.19 + github.com/mattn/go-isatty v0.0.17 github.com/mholt/archiver/v3 v3.5.1 github.com/michaelklishin/rabbit-hole/v2 v2.12.0 github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a @@ -178,49 +168,47 @@ require ( github.com/okta/okta-sdk-golang/v2 v2.12.1 github.com/oracle/oci-go-sdk v24.3.0+incompatible github.com/ory/dockertest v3.3.5+incompatible - github.com/ory/dockertest/v3 v3.10.0 + github.com/ory/dockertest/v3 v3.9.1 github.com/patrickmn/go-cache v2.1.0+incompatible github.com/pires/go-proxyproto v0.6.1 github.com/pkg/errors v0.9.1 github.com/posener/complete v1.2.3 github.com/pquerna/otp v1.2.1-0.20191009055518-468c2dd2b58d - github.com/prometheus/client_golang v1.14.0 - github.com/prometheus/common v0.37.0 + github.com/prometheus/client_golang v1.11.1 + github.com/prometheus/common v0.26.0 github.com/rboyer/safeio v0.2.1 
github.com/ryanuber/columnize v2.1.0+incompatible github.com/ryanuber/go-glob v1.0.0 github.com/sasha-s/go-deadlock v0.2.0 github.com/sethvargo/go-limiter v0.7.1 github.com/shirou/gopsutil/v3 v3.22.6 - github.com/stretchr/testify v1.8.4 - go.etcd.io/bbolt v1.3.7 - go.etcd.io/etcd/client/pkg/v3 v3.5.7 - go.etcd.io/etcd/client/v2 v2.305.5 - go.etcd.io/etcd/client/v3 v3.5.7 - go.mongodb.org/atlas v0.28.0 - go.mongodb.org/mongo-driver v1.11.6 - go.opentelemetry.io/otel v1.14.0 - go.opentelemetry.io/otel/sdk v1.14.0 - go.opentelemetry.io/otel/trace v1.14.0 - go.uber.org/atomic v1.11.0 - go.uber.org/goleak v1.2.1 - golang.org/x/crypto v0.9.0 - golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 - golang.org/x/net v0.10.0 - golang.org/x/oauth2 v0.8.0 - golang.org/x/sync v0.2.0 - golang.org/x/sys v0.8.0 - golang.org/x/term v0.8.0 - golang.org/x/text v0.9.0 - golang.org/x/tools v0.7.0 - google.golang.org/api v0.124.0 - google.golang.org/grpc v1.55.0 + github.com/stretchr/testify v1.8.2 + go.etcd.io/bbolt v1.3.6 + go.etcd.io/etcd/client/pkg/v3 v3.5.0 + go.etcd.io/etcd/client/v2 v2.305.0 + go.etcd.io/etcd/client/v3 v3.5.0 + go.mongodb.org/atlas v0.15.0 + go.mongodb.org/mongo-driver v1.7.3 + go.opentelemetry.io/otel v1.11.2 + go.opentelemetry.io/otel/sdk v1.11.2 + go.opentelemetry.io/otel/trace v1.11.2 + go.uber.org/atomic v1.9.0 + go.uber.org/goleak v1.1.12 + golang.org/x/crypto v0.6.0 + golang.org/x/net v0.8.0 + golang.org/x/oauth2 v0.6.0 + golang.org/x/sync v0.1.0 + golang.org/x/sys v0.6.0 + golang.org/x/term v0.6.0 + golang.org/x/tools v0.6.0 + google.golang.org/api v0.114.0 + google.golang.org/grpc v1.53.0 google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 - google.golang.org/protobuf v1.30.0 + google.golang.org/protobuf v1.29.1 gopkg.in/ory-am/dockertest.v3 v3.3.4 + gopkg.in/square/go-jose.v2 v2.6.0 gotest.tools/gotestsum v1.9.0 - honnef.co/go/tools v0.4.3 - k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 + k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed layeh.com/radius v0.0.0-20190322222518-890bc1058917 mvdan.cc/gofumpt v0.3.1 nhooyr.io/websocket v1.8.7 @@ -228,22 +216,20 @@ require ( require ( cloud.google.com/go v0.110.0 // indirect - cloud.google.com/go/compute v1.19.3 // indirect + cloud.google.com/go/compute v1.18.0 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect - cloud.google.com/go/iam v1.0.1 // indirect - cloud.google.com/go/kms v1.10.2 // indirect + cloud.google.com/go/iam v0.12.0 // indirect + cloud.google.com/go/kms v1.9.0 // indirect code.cloudfoundry.org/gofileutils v0.0.0-20170111115228-4d0c80011a0f // indirect - github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect - github.com/99designs/keyring v1.2.2 // indirect github.com/Azure/azure-pipeline-go v0.2.3 // indirect github.com/Azure/azure-sdk-for-go v67.2.0+incompatible // indirect - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v4 v4.2.1 // indirect - github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/msi/armmsi v1.1.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.1.1 // indirect - github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.1 // indirect + 
github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 // indirect + github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v4 v4.1.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/msi/armmsi v1.0.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/azure/auth v0.5.12 // indirect github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 // indirect @@ -252,46 +238,38 @@ require ( github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect - github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 // indirect - github.com/BurntSushi/toml v1.2.1 // indirect + github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v0.8.1 // indirect + github.com/BurntSushi/toml v1.2.0 // indirect github.com/DataDog/datadog-go v3.2.0+incompatible // indirect github.com/Jeffail/gabs v1.1.1 // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver v1.5.0 // indirect - github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/Masterminds/sprig v2.22.0+incompatible // indirect - github.com/Masterminds/sprig/v3 v3.2.3 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/Microsoft/go-winio v0.5.2 // indirect + github.com/Microsoft/hcsshim v0.9.0 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect - github.com/agext/levenshtein v1.2.1 // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/andybalholm/brotli v1.0.4 // indirect - github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect - github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect - github.com/aws/aws-sdk-go-v2 v1.17.7 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.13.18 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1 // indirect - github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 // indirect - github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.12.6 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.18.7 // indirect - github.com/aws/smithy-go v1.13.5 // indirect - github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f // indirect + 
github.com/apache/arrow/go/arrow v0.0.0-20210818145353-234c94e4ce64 // indirect + github.com/aws/aws-sdk-go-v2 v1.8.0 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.3.2 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.4.0 // indirect + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.4.0 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.2.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.2.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.5.2 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.12.0 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.3.2 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.6.1 // indirect + github.com/aws/smithy-go v1.7.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/speakeasy v0.1.0 // indirect - github.com/boombuler/barcode v1.0.1 // indirect + github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect - github.com/cenkalti/backoff/v4 v4.2.0 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect github.com/census-instrumentation/opencensus-proto v0.4.1 // indirect github.com/centrify/cloud-golang-sdk v0.0.0-20210923165758-a8c48d049166 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect @@ -300,90 +278,88 @@ require ( github.com/cloudflare/circl v1.1.0 // indirect github.com/cloudfoundry-community/go-cfclient v0.0.0-20210823134051-721f0e559306 // indirect github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe // indirect - github.com/cncf/xds/go v0.0.0-20230310173818-32f1caf87195 // indirect - github.com/containerd/containerd v1.7.0 // indirect + github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b // indirect + github.com/containerd/cgroups v1.0.3 // indirect + github.com/containerd/containerd v1.5.17 // indirect github.com/containerd/continuity v0.3.0 // indirect github.com/coreos/go-oidc v2.2.1+incompatible // indirect github.com/coreos/go-oidc/v3 v3.5.0 // indirect github.com/coreos/go-semver v0.3.0 // indirect - github.com/coreos/go-systemd/v22 v22.5.0 // indirect - github.com/couchbase/gocb/v2 v2.6.3 // indirect - github.com/couchbase/gocbcore/v10 v10.2.3 // indirect - github.com/danieljoos/wincred v1.1.2 // indirect + github.com/coreos/go-systemd/v22 v22.3.2 // indirect + github.com/couchbase/gocb/v2 v2.3.3 // indirect + github.com/couchbase/gocbcore/v10 v10.0.4 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba // indirect github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc // indirect github.com/digitalocean/godo v1.7.5 // indirect github.com/dimchansky/utfbom v1.1.1 // indirect github.com/dnephin/pflag v1.0.7 // indirect - github.com/docker/cli v20.10.20+incompatible // indirect - github.com/docker/distribution v2.8.2+incompatible // indirect - github.com/docker/docker v23.0.4+incompatible // indirect - github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/cli v20.10.18+incompatible // indirect + github.com/docker/distribution v2.8.1+incompatible // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect - github.com/dvsekhvalnov/jose2go v1.5.0 // indirect - github.com/emicklei/go-restful/v3 v3.10.1 // indirect - 
github.com/envoyproxy/go-control-plane v0.11.0 // indirect - github.com/envoyproxy/protoc-gen-validate v0.10.0 // indirect + github.com/emicklei/go-restful/v3 v3.8.0 // indirect + github.com/envoyproxy/go-control-plane v0.10.3 // indirect + github.com/envoyproxy/protoc-gen-validate v0.9.1 // indirect github.com/evanphx/json-patch/v5 v5.6.0 // indirect github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect - github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.2 // indirect - github.com/gammazero/deque v0.2.1 // indirect - github.com/gammazero/workerpool v1.1.3 // indirect - github.com/go-asn1-ber/asn1-ber v1.5.4 // indirect + github.com/fsnotify/fsnotify v1.5.4 // indirect + github.com/gabriel-vasile/mimetype v1.3.1 // indirect + github.com/gammazero/deque v0.0.0-20190130191400-2afb3858e9c7 // indirect + github.com/gammazero/workerpool v0.0.0-20190406235159-88d534f22b56 // indirect + github.com/go-asn1-ber/asn1-ber v1.5.1 // indirect + github.com/go-jose/go-jose/v3 v3.0.0 // indirect github.com/go-ldap/ldif v0.0.0-20200320164324-fd88d9b715b3 // indirect github.com/go-logr/logr v1.2.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-openapi/analysis v0.20.0 // indirect github.com/go-openapi/errors v0.20.1 // indirect - github.com/go-openapi/jsonpointer v0.19.6 // indirect - github.com/go-openapi/jsonreference v0.20.1 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.5 // indirect github.com/go-openapi/loads v0.20.2 // indirect github.com/go-openapi/runtime v0.19.24 // indirect github.com/go-openapi/spec v0.20.3 // indirect github.com/go-openapi/strfmt v0.20.0 // indirect - github.com/go-openapi/swag v0.22.3 // indirect + github.com/go-openapi/swag v0.19.14 // indirect github.com/go-openapi/validate v0.20.2 // indirect github.com/go-ozzo/ozzo-validation v3.6.0+incompatible // indirect - github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/go-stack/stack v1.8.0 // indirect github.com/gofrs/uuid v4.3.0+incompatible // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect github.com/golang/snappy v0.0.4 // indirect - github.com/google/flatbuffers v23.1.21+incompatible // indirect + github.com/google/flatbuffers v2.0.0+incompatible // indirect github.com/google/gnostic v0.5.7-v3refs // indirect github.com/google/go-querystring v1.1.0 // indirect - github.com/google/gofuzz v1.2.0 // indirect - github.com/google/s2a-go v0.1.4 // indirect + github.com/google/gofuzz v1.1.0 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/uuid v1.3.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect - github.com/googleapis/gax-go/v2 v2.9.1 // indirect + github.com/googleapis/gax-go/v2 v2.7.1 // indirect github.com/gophercloud/gophercloud v0.1.0 // indirect github.com/gorilla/websocket v1.5.0 // indirect - github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed // indirect github.com/hashicorp/cronexpr v1.1.1 // indirect github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + 
github.com/hashicorp/go-kms-wrapping/entropy/v2 v2.0.0 // indirect github.com/hashicorp/go-msgpack/v2 v2.0.0 // indirect github.com/hashicorp/go-secure-stdlib/fileutil v0.1.0 // indirect - github.com/hashicorp/go-slug v0.11.1 // indirect - github.com/hashicorp/go-tfe v1.25.1 // indirect + github.com/hashicorp/go-slug v0.7.0 // indirect + github.com/hashicorp/go-tfe v0.20.0 // indirect github.com/hashicorp/jsonapi v0.0.0-20210826224640-ee7dae0fb22d // indirect github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/mdns v1.0.4 // indirect github.com/hashicorp/net-rpc-msgpackrpc/v2 v2.0.0 // indirect github.com/hashicorp/serf v0.10.1 // indirect - github.com/hashicorp/vault/api/auth/kubernetes v0.4.0 // indirect + github.com/hashicorp/vault/api/auth/kubernetes v0.3.0 // indirect github.com/hashicorp/vic v1.5.1-0.20190403131502-bbfe86ec9443 // indirect - github.com/hashicorp/yamux v0.1.1 // indirect - github.com/huandu/xstrings v1.4.0 // indirect - github.com/imdario/mergo v0.3.15 // indirect + github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 // indirect + github.com/huandu/xstrings v1.3.2 // indirect + github.com/imdario/mergo v0.3.13 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/pgconn v1.11.0 // indirect github.com/jackc/pgio v1.0.0 // indirect @@ -402,67 +378,64 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/kelseyhightower/envconfig v1.4.0 // indirect - github.com/klauspost/compress v1.16.5 // indirect + github.com/klauspost/compress v1.15.15 // indirect github.com/klauspost/pgzip v1.2.5 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/lib/pq v1.10.6 // indirect github.com/linode/linodego v0.7.1 // indirect github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect - github.com/mailru/easyjson v0.7.7 // indirect + github.com/mailru/easyjson v0.7.6 // indirect github.com/mattn/go-ieproxy v0.0.1 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect - github.com/mediocregopher/radix/v4 v4.1.2 // indirect - github.com/miekg/dns v1.1.43 // indirect + github.com/mediocregopher/radix/v4 v4.1.1 // indirect + github.com/miekg/dns v1.1.41 // indirect github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect github.com/mitchellh/hashstructure v1.1.0 // indirect github.com/mitchellh/pointerstructure v1.2.1 // indirect - github.com/moby/patternmatcher v0.5.0 // indirect - github.com/moby/sys/sequential v0.5.0 // indirect - github.com/moby/term v0.0.0-20221205130635-1aeaba878587 // indirect + github.com/moby/sys/mount v0.2.0 // indirect + github.com/moby/sys/mountinfo v0.5.0 // indirect + github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/mongodb-forks/digest v1.0.4 // indirect - github.com/montanaflynn/stats v0.7.0 // indirect - github.com/mtibben/percent v0.2.1 // indirect + github.com/mongodb-forks/digest v1.0.3 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/nicolai86/scaleway-sdk v1.10.2-0.20180628010248-798f60e20bb2 // indirect github.com/nwaples/rardecode v1.1.2 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b // indirect - github.com/opencontainers/runc v1.1.6 // indirect + 
github.com/opencontainers/image-spec v1.0.2 // indirect + github.com/opencontainers/runc v1.1.4 // indirect + github.com/openlyinc/pointy v1.1.2 // indirect github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b // indirect github.com/oracle/oci-go-sdk/v60 v60.0.0 // indirect github.com/packethost/packngo v0.1.1-0.20180711074735-b9cb5096f54c // indirect github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 // indirect github.com/pierrec/lz4 v2.6.1+incompatible // indirect - github.com/pierrec/lz4/v4 v4.1.17 // indirect + github.com/pierrec/lz4/v4 v4.1.8 // indirect github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/pquerna/cachecontrol v0.1.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect - github.com/prometheus/procfs v0.8.0 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/procfs v0.6.0 // indirect github.com/renier/xmlrpc v0.0.0-20170708154548-ce4a1a486c03 // indirect - github.com/rogpeppe/go-internal v1.10.0 // indirect - github.com/shopspring/decimal v1.3.1 // indirect + github.com/rogpeppe/go-internal v1.9.0 // indirect github.com/sirupsen/logrus v1.9.0 // indirect github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect - github.com/snowflakedb/gosnowflake v1.6.18 // indirect + github.com/snowflakedb/gosnowflake v1.6.3 // indirect github.com/softlayer/softlayer-go v0.0.0-20180806151055-260589d94c7d // indirect github.com/sony/gobreaker v0.4.2-0.20210216022020-dd874f9dd33b // indirect - github.com/spf13/cast v1.5.1 // indirect github.com/spf13/pflag v1.0.5 // indirect github.com/stretchr/objx v0.5.0 // indirect github.com/tencentcloud/tencentcloud-sdk-go v1.0.162 // indirect - github.com/tilinna/clock v1.1.0 // indirect + github.com/tilinna/clock v1.0.2 // indirect github.com/tklauser/go-sysconf v0.3.10 // indirect github.com/tklauser/numcpus v0.4.0 // indirect github.com/tv42/httpunix v0.0.0-20191220191345-2ba4b9c3382c // indirect github.com/ulikunitz/xz v0.5.10 // indirect github.com/vmware/govmomi v0.18.0 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect - github.com/xdg-go/scram v1.1.1 // indirect - github.com/xdg-go/stringprep v1.0.3 // indirect + github.com/xdg-go/scram v1.0.2 // indirect + github.com/xdg-go/stringprep v1.0.2 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect @@ -470,30 +443,28 @@ require ( github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 // indirect github.com/yusufpapurcu/wmi v1.2.2 // indirect - github.com/zclconf/go-cty v1.12.1 // indirect - go.etcd.io/etcd/api/v3 v3.5.7 // indirect + go.etcd.io/etcd/api/v3 v3.5.0 // indirect go.opencensus.io v0.24.0 // indirect go.uber.org/multierr v1.7.0 // indirect go.uber.org/zap v1.19.1 // indirect - golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a // indirect - golang.org/x/mod v0.9.0 // indirect - golang.org/x/time v0.3.0 // indirect + golang.org/x/mod v0.8.0 // indirect + golang.org/x/text v0.8.0 // indirect + golang.org/x/time v0.0.0-20220411224347-583f2d630306 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect 
google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20230525154841-bd750badd5c6 // indirect + google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.66.2 // indirect gopkg.in/jcmturner/goidentity.v3 v3.0.0 // indirect gopkg.in/resty.v1 v1.12.0 // indirect - gopkg.in/square/go-jose.v2 v2.6.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/api v0.27.2 // indirect - k8s.io/apimachinery v0.27.2 // indirect - k8s.io/client-go v0.27.2 // indirect - k8s.io/klog/v2 v2.90.1 // indirect - k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f // indirect - sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect + k8s.io/api v0.25.3 // indirect + k8s.io/apimachinery v0.25.3 // indirect + k8s.io/client-go v0.25.3 // indirect + k8s.io/klog/v2 v2.70.1 // indirect + k8s.io/kube-openapi v0.0.0-20220803162953-67bda5d908f1 // indirect + sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect - sigs.k8s.io/yaml v1.3.0 // indirect + sigs.k8s.io/yaml v1.2.0 // indirect ) diff --git a/go.sum b/go.sum index 1a065c57ff6dac..90b0d94109ea1a 100644 --- a/go.sum +++ b/go.sum @@ -6,7 +6,6 @@ cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSR cloud.google.com/go v0.39.0/go.mod h1:rVLT6fkc8chs9sfPtFc1SBH6em7n+ZoXaG+87tDISts= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= @@ -17,578 +16,89 @@ cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKV cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= -cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= -cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= -cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.1/go.mod 
h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= -cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= -cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= -cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= -cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRYtA= -cloud.google.com/go v0.105.0/go.mod h1:PrLgOJNe5nfE9UMxKxgXj4mD3voiP+YQ6gdt6KMFOKM= -cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I= cloud.google.com/go v0.110.0 h1:Zc8gqp3+a9/Eyph2KDmcGaPtbKRIoqq4YTlL4NMD0Ys= cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= -cloud.google.com/go/accessapproval v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4= -cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw= -cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= -cloud.google.com/go/accesscontextmanager v1.3.0/go.mod h1:TgCBehyr5gNMz7ZaH9xubp+CE8dkrszb4oK9CWyvD4o= -cloud.google.com/go/accesscontextmanager v1.4.0/go.mod h1:/Kjh7BBu/Gh83sv+K60vN9QE5NJcd80sU33vIe2IFPE= -cloud.google.com/go/accesscontextmanager v1.6.0/go.mod h1:8XCvZWfYw3K/ji0iVnp+6pu7huxoQTLmxAbVjbloTtM= -cloud.google.com/go/aiplatform v1.22.0/go.mod h1:ig5Nct50bZlzV6NvKaTwmplLLddFx0YReh9WfTO5jKw= -cloud.google.com/go/aiplatform v1.24.0/go.mod h1:67UUvRBKG6GTayHKV8DBv2RtR1t93YRu5B1P3x99mYY= -cloud.google.com/go/aiplatform v1.27.0/go.mod h1:Bvxqtl40l0WImSb04d0hXFU7gDOiq9jQmorivIiWcKg= -cloud.google.com/go/aiplatform v1.35.0/go.mod h1:7MFT/vCaOyZT/4IIFfxH4ErVg/4ku6lKv3w0+tFTgXQ= -cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI= -cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4= -cloud.google.com/go/analytics v0.18.0/go.mod h1:ZkeHGQlcIPkw0R/GW+boWHhCOR43xz9RN/jn7WcqfIE= -cloud.google.com/go/apigateway v1.3.0/go.mod h1:89Z8Bhpmxu6AmUxuVRg/ECRGReEdiP3vQtk4Z1J9rJk= -cloud.google.com/go/apigateway v1.4.0/go.mod h1:pHVY9MKGaH9PQ3pJ4YLzoj6U5FUDeDFBllIz7WmzJoc= -cloud.google.com/go/apigateway v1.5.0/go.mod h1:GpnZR3Q4rR7LVu5951qfXPJCHquZt02jf7xQx7kpqN8= -cloud.google.com/go/apigeeconnect v1.3.0/go.mod h1:G/AwXFAKo0gIXkPTVfZDd2qA1TxBXJ3MgMRBQkIi9jc= -cloud.google.com/go/apigeeconnect v1.4.0/go.mod h1:kV4NwOKqjvt2JYR0AoIWo2QGfoRtn/pkS3QlHp0Ni04= -cloud.google.com/go/apigeeconnect v1.5.0/go.mod h1:KFaCqvBRU6idyhSNyn3vlHXc8VMDJdRmwDF6JyFRqZ8= -cloud.google.com/go/apigeeregistry v0.4.0/go.mod h1:EUG4PGcsZvxOXAdyEghIdXwAEi/4MEaoqLMLDMIwKXY= -cloud.google.com/go/apigeeregistry v0.5.0/go.mod h1:YR5+s0BVNZfVOUkMa5pAR2xGd0A473vA5M7j247o1wM= -cloud.google.com/go/apikeys v0.4.0/go.mod h1:XATS/yqZbaBK0HOssf+ALHp8jAlNHUgyfprvNcBIszU= -cloud.google.com/go/apikeys v0.5.0/go.mod h1:5aQfwY4D+ewMMWScd3hm2en3hCj+BROlyrt3ytS7KLI= -cloud.google.com/go/appengine v1.4.0/go.mod h1:CS2NhuBuDXM9f+qscZ6V86m1MIIqPj3WC/UoEuR1Sno= -cloud.google.com/go/appengine v1.5.0/go.mod h1:TfasSozdkFI0zeoxW3PTBLiNqRmzraodCWatWI9Dmak= -cloud.google.com/go/appengine v1.6.0/go.mod h1:hg6i0J/BD2cKmDJbaFSYHFyZkgBEfQrDg/X0V5fJn84= -cloud.google.com/go/area120 v0.5.0/go.mod h1:DE/n4mp+iqVyvxHN41Vf1CR602GiHQjFPusMFW6bGR4= -cloud.google.com/go/area120 v0.6.0/go.mod h1:39yFJqWVgm0UZqWTOdqkLhjoC7uFfgXRC8g/ZegeAh0= -cloud.google.com/go/area120 v0.7.1/go.mod h1:j84i4E1RboTWjKtZVWXPqvK5VHQFJRF2c1Nm69pWm9k= -cloud.google.com/go/artifactregistry v1.6.0/go.mod 
h1:IYt0oBPSAGYj/kprzsBjZ/4LnG/zOcHyFHjWPCi6SAQ= -cloud.google.com/go/artifactregistry v1.7.0/go.mod h1:mqTOFOnGZx8EtSqK/ZWcsm/4U8B77rbcLP6ruDU2Ixk= -cloud.google.com/go/artifactregistry v1.8.0/go.mod h1:w3GQXkJX8hiKN0v+at4b0qotwijQbYUqF2GWkZzAhC0= -cloud.google.com/go/artifactregistry v1.9.0/go.mod h1:2K2RqvA2CYvAeARHRkLDhMDJ3OXy26h3XW+3/Jh2uYc= -cloud.google.com/go/artifactregistry v1.11.2/go.mod h1:nLZns771ZGAwVLzTX/7Al6R9ehma4WUEhZGWV6CeQNQ= -cloud.google.com/go/asset v1.5.0/go.mod h1:5mfs8UvcM5wHhqtSv8J1CtxxaQq3AdBxxQi2jGW/K4o= -cloud.google.com/go/asset v1.7.0/go.mod h1:YbENsRK4+xTiL+Ofoj5Ckf+O17kJtgp3Y3nn4uzZz5s= -cloud.google.com/go/asset v1.8.0/go.mod h1:mUNGKhiqIdbr8X7KNayoYvyc4HbbFO9URsjbytpUaW0= -cloud.google.com/go/asset v1.9.0/go.mod h1:83MOE6jEJBMqFKadM9NLRcs80Gdw76qGuHn8m3h8oHQ= -cloud.google.com/go/asset v1.10.0/go.mod h1:pLz7uokL80qKhzKr4xXGvBQXnzHn5evJAEAtZiIb0wY= -cloud.google.com/go/asset v1.11.1/go.mod h1:fSwLhbRvC9p9CXQHJ3BgFeQNM4c9x10lqlrdEUYXlJo= -cloud.google.com/go/assuredworkloads v1.5.0/go.mod h1:n8HOZ6pff6re5KYfBXcFvSViQjDwxFkAkmUFffJRbbY= -cloud.google.com/go/assuredworkloads v1.6.0/go.mod h1:yo2YOk37Yc89Rsd5QMVECvjaMKymF9OP+QXWlKXUkXw= -cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVoYoxeLBoj4XkKYscNI= -cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo= -cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0= -cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= -cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= -cloud.google.com/go/automl v1.6.0/go.mod h1:ugf8a6Fx+zP0D59WLhqgTDsQI9w07o64uf/Is3Nh5p8= -cloud.google.com/go/automl v1.7.0/go.mod h1:RL9MYCCsJEOmt0Wf3z9uzG0a7adTT1fe+aObgSpkCt8= -cloud.google.com/go/automl v1.8.0/go.mod h1:xWx7G/aPEe/NP+qzYXktoBSDfjO+vnKMGgsApGJJquM= -cloud.google.com/go/automl v1.12.0/go.mod h1:tWDcHDp86aMIuHmyvjuKeeHEGq76lD7ZqfGLN6B0NuU= -cloud.google.com/go/baremetalsolution v0.3.0/go.mod h1:XOrocE+pvK1xFfleEnShBlNAXf+j5blPPxrhjKgnIFc= -cloud.google.com/go/baremetalsolution v0.4.0/go.mod h1:BymplhAadOO/eBa7KewQ0Ppg4A4Wplbn+PsFKRLo0uI= -cloud.google.com/go/baremetalsolution v0.5.0/go.mod h1:dXGxEkmR9BMwxhzBhV0AioD0ULBmuLZI8CdwalUxuss= -cloud.google.com/go/batch v0.3.0/go.mod h1:TR18ZoAekj1GuirsUsR1ZTKN3FC/4UDnScjT8NXImFE= -cloud.google.com/go/batch v0.4.0/go.mod h1:WZkHnP43R/QCGQsZ+0JyG4i79ranE2u8xvjq/9+STPE= -cloud.google.com/go/batch v0.7.0/go.mod h1:vLZN95s6teRUqRQ4s3RLDsH8PvboqBK+rn1oevL159g= -cloud.google.com/go/beyondcorp v0.2.0/go.mod h1:TB7Bd+EEtcw9PCPQhCJtJGjk/7TC6ckmnSFS+xwTfm4= -cloud.google.com/go/beyondcorp v0.3.0/go.mod h1:E5U5lcrcXMsCuoDNyGrpyTm/hn7ne941Jz2vmksAxW8= -cloud.google.com/go/beyondcorp v0.4.0/go.mod h1:3ApA0mbhHx6YImmuubf5pyW8srKnCEPON32/5hj+RmM= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/bigquery v1.42.0/go.mod h1:8dRTJxhtG+vwBKzE5OseQn/hiydoQN3EedCaOdYmxRA= 
-cloud.google.com/go/bigquery v1.43.0/go.mod h1:ZMQcXHsl+xmU1z36G2jNGZmKp9zNY5BUua5wDgmNCfw= -cloud.google.com/go/bigquery v1.44.0/go.mod h1:0Y33VqXTEsbamHJvJHdFmtqHvMIY28aK1+dFsvaChGc= -cloud.google.com/go/bigquery v1.48.0/go.mod h1:QAwSz+ipNgfL5jxiaK7weyOhzdoAy1zFm0Nf1fysJac= -cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY= -cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s= -cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI= -cloud.google.com/go/billing v1.7.0/go.mod h1:q457N3Hbj9lYwwRbnlD7vUpyjq6u5U1RAOArInEiD5Y= -cloud.google.com/go/billing v1.12.0/go.mod h1:yKrZio/eu+okO/2McZEbch17O5CB5NpZhhXG6Z766ss= -cloud.google.com/go/binaryauthorization v1.1.0/go.mod h1:xwnoWu3Y84jbuHa0zd526MJYmtnVXn0syOjaJgy4+dM= -cloud.google.com/go/binaryauthorization v1.2.0/go.mod h1:86WKkJHtRcv5ViNABtYMhhNWRrD1Vpi//uKEy7aYEfI= -cloud.google.com/go/binaryauthorization v1.3.0/go.mod h1:lRZbKgjDIIQvzYQS1p99A7/U1JqvqeZg0wiI5tp6tg0= -cloud.google.com/go/binaryauthorization v1.4.0/go.mod h1:tsSPQrBd77VLplV70GUhBf/Zm3FsKmgSqgm4UmiDItk= -cloud.google.com/go/binaryauthorization v1.5.0/go.mod h1:OSe4OU1nN/VswXKRBmciKpo9LulY41gch5c68htf3/Q= -cloud.google.com/go/certificatemanager v1.3.0/go.mod h1:n6twGDvcUBFu9uBgt4eYvvf3sQ6My8jADcOVwHmzadg= -cloud.google.com/go/certificatemanager v1.4.0/go.mod h1:vowpercVFyqs8ABSmrdV+GiFf2H/ch3KyudYQEMM590= -cloud.google.com/go/certificatemanager v1.6.0/go.mod h1:3Hh64rCKjRAX8dXgRAyOcY5vQ/fE1sh8o+Mdd6KPgY8= -cloud.google.com/go/channel v1.8.0/go.mod h1:W5SwCXDJsq/rg3tn3oG0LOxpAo6IMxNa09ngphpSlnk= -cloud.google.com/go/channel v1.9.0/go.mod h1:jcu05W0my9Vx4mt3/rEHpfxc9eKi9XwsdDL8yBMbKUk= -cloud.google.com/go/channel v1.11.0/go.mod h1:IdtI0uWGqhEeatSB62VOoJ8FSUhJ9/+iGkJVqp74CGE= -cloud.google.com/go/cloudbuild v1.3.0/go.mod h1:WequR4ULxlqvMsjDEEEFnOG5ZSRSgWOywXYDb1vPE6U= -cloud.google.com/go/cloudbuild v1.4.0/go.mod h1:5Qwa40LHiOXmz3386FrjrYM93rM/hdRr7b53sySrTqA= -cloud.google.com/go/cloudbuild v1.7.0/go.mod h1:zb5tWh2XI6lR9zQmsm1VRA+7OCuve5d8S+zJUul8KTg= -cloud.google.com/go/clouddms v1.3.0/go.mod h1:oK6XsCDdW4Ib3jCCBugx+gVjevp2TMXFtgxvPSee3OM= -cloud.google.com/go/clouddms v1.4.0/go.mod h1:Eh7sUGCC+aKry14O1NRljhjyrr0NFC0G2cjwX0cByRk= -cloud.google.com/go/clouddms v1.5.0/go.mod h1:QSxQnhikCLUw13iAbffF2CZxAER3xDGNHjsTAkQJcQA= -cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY= -cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI= -cloud.google.com/go/cloudtasks v1.7.0/go.mod h1:ImsfdYWwlWNJbdgPIIGJWC+gemEGTBK/SunNQQNCAb4= -cloud.google.com/go/cloudtasks v1.8.0/go.mod h1:gQXUIwCSOI4yPVK7DgTVFiiP0ZW/eQkydWzwVMdHxrI= -cloud.google.com/go/cloudtasks v1.9.0/go.mod h1:w+EyLsVkLWHcOaqNEyvcKAsWp9p29dL6uL9Nst1cI7Y= -cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= -cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= -cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= -cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= -cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= -cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= -cloud.google.com/go/compute v1.10.0/go.mod h1:ER5CLbMxl90o2jtNbGSbtfOpQKR0t15FOtRsugnLrlU= -cloud.google.com/go/compute v1.12.0/go.mod 
h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= -cloud.google.com/go/compute v1.12.1/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= -cloud.google.com/go/compute v1.13.0/go.mod h1:5aPTS0cUNMIc1CE546K+Th6weJUNQErARyZtRXDJ8GE= -cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo= -cloud.google.com/go/compute v1.15.1/go.mod h1:bjjoF/NtFUrkD/urWfdHaKuOPDR5nWIs63rR+SXhcpA= +cloud.google.com/go/compute v1.18.0 h1:FEigFqoDbys2cvFkZ9Fjq4gnHBP55anJ0yQyau2f9oY= cloud.google.com/go/compute v1.18.0/go.mod h1:1X7yHxec2Ga+Ss6jPyjxRxpu2uu7PLgsOVXvgU0yacs= -cloud.google.com/go/compute v1.19.3 h1:DcTwsFgGev/wV5+q8o2fzgcHOaac+DKGC91ZlvpsQds= -cloud.google.com/go/compute v1.19.3/go.mod h1:qxvISKp/gYnXkSAD1ppcSOveRAmzxicEv/JlizULFrI= -cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU= cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= -cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= -cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= -cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= -cloud.google.com/go/container v1.6.0/go.mod h1:Xazp7GjJSeUYo688S+6J5V+n/t+G5sKBTFkKNudGRxg= -cloud.google.com/go/container v1.7.0/go.mod h1:Dp5AHtmothHGX3DwwIHPgq45Y8KmNsgN3amoYfxVkLo= -cloud.google.com/go/container v1.13.1/go.mod h1:6wgbMPeQRw9rSnKBCAJXnds3Pzj03C4JHamr8asWKy4= -cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I= -cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4= -cloud.google.com/go/containeranalysis v0.7.0/go.mod h1:9aUL+/vZ55P2CXfuZjS4UjQ9AgXoSw8Ts6lemfmxBxI= -cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= -cloud.google.com/go/datacatalog v1.5.0/go.mod h1:M7GPLNQeLfWqeIm3iuiruhPzkt65+Bx8dAKvScX8jvs= -cloud.google.com/go/datacatalog v1.6.0/go.mod h1:+aEyF8JKg+uXcIdAmmaMUmZ3q1b/lKLtXCmXdnc0lbc= -cloud.google.com/go/datacatalog v1.7.0/go.mod h1:9mEl4AuDYWw81UGc41HonIHH7/sn52H0/tc8f8ZbZIE= -cloud.google.com/go/datacatalog v1.8.0/go.mod h1:KYuoVOv9BM8EYz/4eMFxrr4DUKhGIOXxZoKYF5wdISM= -cloud.google.com/go/datacatalog v1.8.1/go.mod h1:RJ58z4rMp3gvETA465Vg+ag8BGgBdnRPEMMSTr5Uv+M= -cloud.google.com/go/datacatalog v1.12.0/go.mod h1:CWae8rFkfp6LzLumKOnmVh4+Zle4A3NXLzVJ1d1mRm0= -cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM= -cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ= -cloud.google.com/go/dataflow v0.8.0/go.mod h1:Rcf5YgTKPtQyYz8bLYhFoIV/vP39eL7fWNcSOyFfLJE= -cloud.google.com/go/dataform v0.3.0/go.mod h1:cj8uNliRlHpa6L3yVhDOBrUXH+BPAO1+KFMQQNSThKo= -cloud.google.com/go/dataform v0.4.0/go.mod h1:fwV6Y4Ty2yIFL89huYlEkwUPtS7YZinZbzzj5S9FzCE= -cloud.google.com/go/dataform v0.5.0/go.mod h1:GFUYRe8IBa2hcomWplodVmUx/iTL0FrsauObOM3Ipr0= -cloud.google.com/go/dataform v0.6.0/go.mod h1:QPflImQy33e29VuapFdf19oPbE4aYTJxr31OAPV+ulA= -cloud.google.com/go/datafusion v1.4.0/go.mod 
h1:1Zb6VN+W6ALo85cXnM1IKiPw+yQMKMhB9TsTSRDo/38= -cloud.google.com/go/datafusion v1.5.0/go.mod h1:Kz+l1FGHB0J+4XF2fud96WMmRiq/wj8N9u007vyXZ2w= -cloud.google.com/go/datafusion v1.6.0/go.mod h1:WBsMF8F1RhSXvVM8rCV3AeyWVxcC2xY6vith3iw3S+8= -cloud.google.com/go/datalabeling v0.5.0/go.mod h1:TGcJ0G2NzcsXSE/97yWjIZO0bXj0KbVlINXMG9ud42I= -cloud.google.com/go/datalabeling v0.6.0/go.mod h1:WqdISuk/+WIGeMkpw/1q7bK/tFEZxsrFJOJdY2bXvTQ= -cloud.google.com/go/datalabeling v0.7.0/go.mod h1:WPQb1y08RJbmpM3ww0CSUAGweL0SxByuW2E+FU+wXcM= -cloud.google.com/go/dataplex v1.3.0/go.mod h1:hQuRtDg+fCiFgC8j0zV222HvzFQdRd+SVX8gdmFcZzA= -cloud.google.com/go/dataplex v1.4.0/go.mod h1:X51GfLXEMVJ6UN47ESVqvlsRplbLhcsAt0kZCCKsU0A= -cloud.google.com/go/dataplex v1.5.2/go.mod h1:cVMgQHsmfRoI5KFYq4JtIBEUbYwc3c7tXmIDhRmNNVQ= -cloud.google.com/go/dataproc v1.7.0/go.mod h1:CKAlMjII9H90RXaMpSxQ8EU6dQx6iAYNPcYPOkSbi8s= -cloud.google.com/go/dataproc v1.8.0/go.mod h1:5OW+zNAH0pMpw14JVrPONsxMQYMBqJuzORhIBfBn9uI= -cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4= -cloud.google.com/go/dataqna v0.5.0/go.mod h1:90Hyk596ft3zUQ8NkFfvICSIfHFh1Bc7C4cK3vbhkeo= -cloud.google.com/go/dataqna v0.6.0/go.mod h1:1lqNpM7rqNLVgWBJyk5NF6Uen2PHym0jtVJonplVsDA= -cloud.google.com/go/dataqna v0.7.0/go.mod h1:Lx9OcIIeqCrw1a6KdO3/5KMP1wAmTc0slZWwP12Qq3c= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/datastore v1.10.0/go.mod h1:PC5UzAmDEkAmkfaknstTYbNpgE49HAgW2J1gcgUfmdM= -cloud.google.com/go/datastream v1.2.0/go.mod h1:i/uTP8/fZwgATHS/XFu0TcNUhuA0twZxxQ3EyCUQMwo= -cloud.google.com/go/datastream v1.3.0/go.mod h1:cqlOX8xlyYF/uxhiKn6Hbv6WjwPPuI9W2M9SAXwaLLQ= -cloud.google.com/go/datastream v1.4.0/go.mod h1:h9dpzScPhDTs5noEMQVWP8Wx8AFBRyS0s8KWPx/9r0g= -cloud.google.com/go/datastream v1.5.0/go.mod h1:6TZMMNPwjUqZHBKPQ1wwXpb0d5VDVPl2/XoS5yi88q4= -cloud.google.com/go/datastream v1.6.0/go.mod h1:6LQSuswqLa7S4rPAOZFVjHIG3wJIjZcZrw8JDEDJuIs= -cloud.google.com/go/deploy v1.4.0/go.mod h1:5Xghikd4VrmMLNaF6FiRFDlHb59VM59YoDQnOUdsH/c= -cloud.google.com/go/deploy v1.5.0/go.mod h1:ffgdD0B89tToyW/U/D2eL0jN2+IEV/3EMuXHA0l4r+s= -cloud.google.com/go/deploy v1.6.0/go.mod h1:f9PTHehG/DjCom3QH0cntOVRm93uGBDt2vKzAPwpXQI= -cloud.google.com/go/dialogflow v1.15.0/go.mod h1:HbHDWs33WOGJgn6rfzBW1Kv807BE3O1+xGbn59zZWI4= -cloud.google.com/go/dialogflow v1.16.1/go.mod h1:po6LlzGfK+smoSmTBnbkIZY2w8ffjz/RcGSS+sh1el0= -cloud.google.com/go/dialogflow v1.17.0/go.mod h1:YNP09C/kXA1aZdBgC/VtXX74G/TKn7XVCcVumTflA+8= -cloud.google.com/go/dialogflow v1.18.0/go.mod h1:trO7Zu5YdyEuR+BhSNOqJezyFQ3aUzz0njv7sMx/iek= -cloud.google.com/go/dialogflow v1.19.0/go.mod h1:JVmlG1TwykZDtxtTXujec4tQ+D8SBFMoosgy+6Gn0s0= -cloud.google.com/go/dialogflow v1.29.0/go.mod h1:b+2bzMe+k1s9V+F2jbJwpHPzrnIyHihAdRFMtn2WXuM= -cloud.google.com/go/dialogflow v1.31.0/go.mod h1:cuoUccuL1Z+HADhyIA7dci3N5zUssgpBJmCzI6fNRB4= -cloud.google.com/go/dlp v1.6.0/go.mod h1:9eyB2xIhpU0sVwUixfBubDoRwP+GjeUoxxeueZmqvmM= -cloud.google.com/go/dlp v1.7.0/go.mod h1:68ak9vCiMBjbasxeVD17hVPxDEck+ExiHavX8kiHG+Q= -cloud.google.com/go/dlp v1.9.0/go.mod h1:qdgmqgTyReTz5/YNSSuueR8pl7hO0o9bQ39ZhtgkWp4= -cloud.google.com/go/documentai v1.7.0/go.mod h1:lJvftZB5NRiFSX4moiye1SMxHx0Bc3x1+p9e/RfXYiU= -cloud.google.com/go/documentai v1.8.0/go.mod h1:xGHNEB7CtsnySCNrCFdCyyMz44RhFEEX2Q7UD0c5IhU= -cloud.google.com/go/documentai v1.9.0/go.mod 
h1:FS5485S8R00U10GhgBC0aNGrJxBP8ZVpEeJ7PQDZd6k= -cloud.google.com/go/documentai v1.10.0/go.mod h1:vod47hKQIPeCfN2QS/jULIvQTugbmdc0ZvxxfQY1bg4= -cloud.google.com/go/documentai v1.16.0/go.mod h1:o0o0DLTEZ+YnJZ+J4wNfTxmDVyrkzFvttBXXtYRMHkM= -cloud.google.com/go/domains v0.6.0/go.mod h1:T9Rz3GasrpYk6mEGHh4rymIhjlnIuB4ofT1wTxDeT4Y= -cloud.google.com/go/domains v0.7.0/go.mod h1:PtZeqS1xjnXuRPKE/88Iru/LdfoRyEHYA9nFQf4UKpg= -cloud.google.com/go/domains v0.8.0/go.mod h1:M9i3MMDzGFXsydri9/vW+EWz9sWb4I6WyHqdlAk0idE= -cloud.google.com/go/edgecontainer v0.1.0/go.mod h1:WgkZ9tp10bFxqO8BLPqv2LlfmQF1X8lZqwW4r1BTajk= -cloud.google.com/go/edgecontainer v0.2.0/go.mod h1:RTmLijy+lGpQ7BXuTDa4C4ssxyXT34NIuHIgKuP4s5w= -cloud.google.com/go/edgecontainer v0.3.0/go.mod h1:FLDpP4nykgwwIfcLt6zInhprzw0lEi2P1fjO6Ie0qbc= -cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU= -cloud.google.com/go/essentialcontacts v1.3.0/go.mod h1:r+OnHa5jfj90qIfZDO/VztSFqbQan7HV75p8sA+mdGI= -cloud.google.com/go/essentialcontacts v1.4.0/go.mod h1:8tRldvHYsmnBCHdFpvU+GL75oWiBKl80BiqlFh9tp+8= -cloud.google.com/go/essentialcontacts v1.5.0/go.mod h1:ay29Z4zODTuwliK7SnX8E86aUF2CTzdNtvv42niCX0M= -cloud.google.com/go/eventarc v1.7.0/go.mod h1:6ctpF3zTnaQCxUjHUdcfgcA1A2T309+omHZth7gDfmc= -cloud.google.com/go/eventarc v1.8.0/go.mod h1:imbzxkyAU4ubfsaKYdQg04WS1NvncblHEup4kvF+4gw= -cloud.google.com/go/eventarc v1.10.0/go.mod h1:u3R35tmZ9HvswGRBnF48IlYgYeBcPUCjkr4BTdem2Kw= -cloud.google.com/go/filestore v1.3.0/go.mod h1:+qbvHGvXU1HaKX2nD0WEPo92TP/8AQuCVEBXNY9z0+w= -cloud.google.com/go/filestore v1.4.0/go.mod h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLYoINEK8XQAI= -cloud.google.com/go/filestore v1.5.0/go.mod h1:FqBXDWBp4YLHqRnVGveOkHDf8svj9r5+mUDLupOWEDs= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE= -cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk= -cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg= -cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY= -cloud.google.com/go/functions v1.9.0/go.mod h1:Y+Dz8yGguzO3PpIjhLTbnqV1CWmgQ5UwtlpzoyquQ08= -cloud.google.com/go/functions v1.10.0/go.mod h1:0D3hEOe3DbEvCXtYOZHQZmD+SzYsi1YbI7dGvHfldXw= -cloud.google.com/go/gaming v1.5.0/go.mod h1:ol7rGcxP/qHTRQE/RO4bxkXq+Fix0j6D4LFPzYTIrDM= -cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2sK4KPUA= -cloud.google.com/go/gaming v1.7.0/go.mod h1:LrB8U7MHdGgFG851iHAfqUdLcKBdQ55hzXy9xBJz0+w= -cloud.google.com/go/gaming v1.8.0/go.mod h1:xAqjS8b7jAVW0KFYeRUxngo9My3f33kFmua++Pi+ggM= -cloud.google.com/go/gaming v1.9.0/go.mod h1:Fc7kEmCObylSWLO334NcO+O9QMDyz+TKC4v1D7X+Bc0= -cloud.google.com/go/gkebackup v0.2.0/go.mod h1:XKvv/4LfG829/B8B7xRkk8zRrOEbKtEam6yNfuQNH60= -cloud.google.com/go/gkebackup v0.3.0/go.mod h1:n/E671i1aOQvUxT541aTkCwExO/bTer2HDlj4TsBRAo= -cloud.google.com/go/gkebackup v0.4.0/go.mod h1:byAyBGUwYGEEww7xsbnUTBHIYcOPy/PgUWUtOeRm9Vg= -cloud.google.com/go/gkeconnect v0.5.0/go.mod h1:c5lsNAg5EwAy7fkqX/+goqFsU1Da/jQFqArp+wGNr/o= -cloud.google.com/go/gkeconnect v0.6.0/go.mod h1:Mln67KyU/sHJEBY8kFZ0xTeyPtzbq9StAVvEULYK16A= -cloud.google.com/go/gkeconnect v0.7.0/go.mod h1:SNfmVqPkaEi3bF/B3CNZOAYPYdg7sU+obZ+QTky2Myw= -cloud.google.com/go/gkehub v0.9.0/go.mod h1:WYHN6WG8w9bXU0hqNxt8rm5uxnk8IH+lPY9J2TV7BK0= -cloud.google.com/go/gkehub v0.10.0/go.mod 
h1:UIPwxI0DsrpsVoWpLB0stwKCP+WFVG9+y977wO+hBH0= -cloud.google.com/go/gkehub v0.11.0/go.mod h1:JOWHlmN+GHyIbuWQPl47/C2RFhnFKH38jH9Ascu3n0E= -cloud.google.com/go/gkemulticloud v0.3.0/go.mod h1:7orzy7O0S+5kq95e4Hpn7RysVA7dPs8W/GgfUtsPbrA= -cloud.google.com/go/gkemulticloud v0.4.0/go.mod h1:E9gxVBnseLWCk24ch+P9+B2CoDFJZTyIgLKSalC7tuI= -cloud.google.com/go/gkemulticloud v0.5.0/go.mod h1:W0JDkiyi3Tqh0TJr//y19wyb1yf8llHVto2Htf2Ja3Y= -cloud.google.com/go/grafeas v0.2.0/go.mod h1:KhxgtF2hb0P191HlY5besjYm6MqTSTj3LSI+M+ByZHc= -cloud.google.com/go/gsuiteaddons v1.3.0/go.mod h1:EUNK/J1lZEZO8yPtykKxLXI6JSVN2rg9bN8SXOa0bgM= -cloud.google.com/go/gsuiteaddons v1.4.0/go.mod h1:rZK5I8hht7u7HxFQcFei0+AtfS9uSushomRlg+3ua1o= -cloud.google.com/go/gsuiteaddons v1.5.0/go.mod h1:TFCClYLd64Eaa12sFVmUyG62tk4mdIsI7pAnSXRkcFo= -cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= -cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= -cloud.google.com/go/iam v0.5.0/go.mod h1:wPU9Vt0P4UmCux7mqtRu6jcpPAb74cP1fh50J3QpkUc= -cloud.google.com/go/iam v0.6.0/go.mod h1:+1AH33ueBne5MzYccyMHtEKqLE4/kJOibtffMHDMFMc= -cloud.google.com/go/iam v0.7.0/go.mod h1:H5Br8wRaDGNc8XP3keLc4unfUUZeyH3Sfl9XpQEYOeg= -cloud.google.com/go/iam v0.8.0/go.mod h1:lga0/y3iH6CX7sYqypWJ33hf7kkfXJag67naqGESjkE= -cloud.google.com/go/iam v0.11.0/go.mod h1:9PiLDanza5D+oWFZiH1uG+RnRCfEGKoyl6yo4cgWZGY= +cloud.google.com/go/iam v0.12.0 h1:DRtTY29b75ciH6Ov1PHb4/iat2CLCvrOm40Q0a6DFpE= cloud.google.com/go/iam v0.12.0/go.mod h1:knyHGviacl11zrtZUoDuYpDgLjvr28sLQaG0YB2GYAY= -cloud.google.com/go/iam v1.0.1 h1:lyeCAU6jpnVNrE9zGQkTl3WgNgK/X+uWwaw0kynZJMU= -cloud.google.com/go/iam v1.0.1/go.mod h1:yR3tmSL8BcZB4bxByRv2jkSIahVmCtfKZwLYGBalRE8= -cloud.google.com/go/iap v1.4.0/go.mod h1:RGFwRJdihTINIe4wZ2iCP0zF/qu18ZwyKxrhMhygBEc= -cloud.google.com/go/iap v1.5.0/go.mod h1:UH/CGgKd4KyohZL5Pt0jSKE4m3FR51qg6FKQ/z/Ix9A= -cloud.google.com/go/iap v1.6.0/go.mod h1:NSuvI9C/j7UdjGjIde7t7HBz+QTwBcapPE07+sSRcLk= -cloud.google.com/go/ids v1.1.0/go.mod h1:WIuwCaYVOzHIj2OhN9HAwvW+DBdmUAdcWlFxRl+KubM= -cloud.google.com/go/ids v1.2.0/go.mod h1:5WXvp4n25S0rA/mQWAg1YEEBBq6/s+7ml1RDCW1IrcY= -cloud.google.com/go/ids v1.3.0/go.mod h1:JBdTYwANikFKaDP6LtW5JAi4gubs57SVNQjemdt6xV4= -cloud.google.com/go/iot v1.3.0/go.mod h1:r7RGh2B61+B8oz0AGE+J72AhA0G7tdXItODWsaA2oLs= -cloud.google.com/go/iot v1.4.0/go.mod h1:dIDxPOn0UvNDUMD8Ger7FIaTuvMkj+aGk94RPP0iV+g= -cloud.google.com/go/iot v1.5.0/go.mod h1:mpz5259PDl3XJthEmh9+ap0affn/MqNSP4My77Qql9o= -cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= -cloud.google.com/go/kms v1.5.0/go.mod h1:QJS2YY0eJGBg3mnDfuaCyLauWwBJiHRboYxJ++1xJNg= -cloud.google.com/go/kms v1.6.0/go.mod h1:Jjy850yySiasBUDi6KFUwUv2n1+o7QZFyuUJg6OgjA0= +cloud.google.com/go/kms v1.9.0 h1:b0votJQa/9DSsxgHwN33/tTLA7ZHVzfWhDCrfiXijSo= cloud.google.com/go/kms v1.9.0/go.mod h1:qb1tPTgfF9RQP8e1wq4cLFErVuTJv7UsSC915J8dh3w= -cloud.google.com/go/kms v1.10.2 h1:8UePKEypK3SQ6g+4mn/s/VgE5L7XOh+FwGGRUqvY3Hw= -cloud.google.com/go/kms v1.10.2/go.mod h1:9mX3Q6pdroWzL20pbK6RaOdBbXBEhMNgK4Pfz2bweb4= -cloud.google.com/go/language v1.4.0/go.mod h1:F9dRpNFQmJbkaop6g0JhSBXCNlO90e1KWx5iDdxbWic= -cloud.google.com/go/language v1.6.0/go.mod h1:6dJ8t3B+lUYfStgls25GusK04NLh3eDLQnWM3mdEbhI= -cloud.google.com/go/language v1.7.0/go.mod h1:DJ6dYN/W+SQOjF8e1hLQXMF21AkH2w9wiPzPCJa2MIE= -cloud.google.com/go/language v1.8.0/go.mod h1:qYPVHf7SPoNNiCL2Dr0FfEFNil1qi3pQEyygwpgVKB8= -cloud.google.com/go/language v1.9.0/go.mod 
h1:Ns15WooPM5Ad/5no/0n81yUetis74g3zrbeJBE+ptUY= -cloud.google.com/go/lifesciences v0.5.0/go.mod h1:3oIKy8ycWGPUyZDR/8RNnTOYevhaMLqh5vLUXs9zvT8= -cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6t/iPhY2Tyfu08= -cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo= -cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw= -cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M= -cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE= -cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc= cloud.google.com/go/longrunning v0.4.1 h1:v+yFJOfKC3yZdY6ZUI933pIYdhyhV8S3NpWrXWmg7jM= -cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo= -cloud.google.com/go/managedidentities v1.3.0/go.mod h1:UzlW3cBOiPrzucO5qWkNkh0w33KFtBJU281hacNvsdE= -cloud.google.com/go/managedidentities v1.4.0/go.mod h1:NWSBYbEMgqmbZsLIyKvxrYbtqOsxY1ZrGM+9RgDqInM= -cloud.google.com/go/managedidentities v1.5.0/go.mod h1:+dWcZ0JlUmpuxpIDfyP5pP5y0bLdRwOS4Lp7gMni/LA= -cloud.google.com/go/maps v0.1.0/go.mod h1:BQM97WGyfw9FWEmQMpZ5T6cpovXXSd1cGmFma94eubI= -cloud.google.com/go/maps v0.6.0/go.mod h1:o6DAMMfb+aINHz/p/jbcY+mYeXBoZoxTfdSQ8VAJaCw= -cloud.google.com/go/mediatranslation v0.5.0/go.mod h1:jGPUhGTybqsPQn91pNXw0xVHfuJ3leR1wj37oU3y1f4= -cloud.google.com/go/mediatranslation v0.6.0/go.mod h1:hHdBCTYNigsBxshbznuIMFNe5QXEowAuNmmC7h8pu5w= -cloud.google.com/go/mediatranslation v0.7.0/go.mod h1:LCnB/gZr90ONOIQLgSXagp8XUW1ODs2UmUMvcgMfI2I= -cloud.google.com/go/memcache v1.4.0/go.mod h1:rTOfiGZtJX1AaFUrOgsMHX5kAzaTQ8azHiuDoTPzNsE= -cloud.google.com/go/memcache v1.5.0/go.mod h1:dk3fCK7dVo0cUU2c36jKb4VqKPS22BTkf81Xq617aWM= -cloud.google.com/go/memcache v1.6.0/go.mod h1:XS5xB0eQZdHtTuTF9Hf8eJkKtR3pVRCcvJwtm68T3rA= -cloud.google.com/go/memcache v1.7.0/go.mod h1:ywMKfjWhNtkQTxrWxCkCFkoPjLHPW6A7WOTVI8xy3LY= -cloud.google.com/go/memcache v1.9.0/go.mod h1:8oEyzXCu+zo9RzlEaEjHl4KkgjlNDaXbCQeQWlzNFJM= -cloud.google.com/go/metastore v1.5.0/go.mod h1:2ZNrDcQwghfdtCwJ33nM0+GrBGlVuh8rakL3vdPY3XY= -cloud.google.com/go/metastore v1.6.0/go.mod h1:6cyQTls8CWXzk45G55x57DVQ9gWg7RiH65+YgPsNh9s= -cloud.google.com/go/metastore v1.7.0/go.mod h1:s45D0B4IlsINu87/AsWiEVYbLaIMeUSoxlKKDqBGFS8= -cloud.google.com/go/metastore v1.8.0/go.mod h1:zHiMc4ZUpBiM7twCIFQmJ9JMEkDSyZS9U12uf7wHqSI= -cloud.google.com/go/metastore v1.10.0/go.mod h1:fPEnH3g4JJAk+gMRnrAnoqyv2lpUCqJPWOodSaf45Eo= -cloud.google.com/go/monitoring v1.7.0/go.mod h1:HpYse6kkGo//7p6sT0wsIC6IBDET0RhIsnmlA53dvEk= -cloud.google.com/go/monitoring v1.8.0/go.mod h1:E7PtoMJ1kQXWxPjB6mv2fhC5/15jInuulFdYYtlcvT4= +cloud.google.com/go/monitoring v1.12.0 h1:+X79DyOP/Ny23XIqSIb37AvFWSxDN15w/ktklVvPLso= cloud.google.com/go/monitoring v1.12.0/go.mod h1:yx8Jj2fZNEkL/GYZyTLS4ZtZEZN8WtDEiEqG4kLK50w= -cloud.google.com/go/monitoring v1.13.0 h1:2qsrgXGVoRXpP7otZ14eE1I568zAa92sJSDPyOJvwjM= -cloud.google.com/go/monitoring v1.13.0/go.mod h1:k2yMBAB1H9JT/QETjNkgdCGD9bPF712XiLTVr+cBrpw= -cloud.google.com/go/networkconnectivity v1.4.0/go.mod h1:nOl7YL8odKyAOtzNX73/M5/mGZgqqMeryi6UPZTk/rA= -cloud.google.com/go/networkconnectivity v1.5.0/go.mod h1:3GzqJx7uhtlM3kln0+x5wyFvuVH1pIBJjhCpjzSt75o= -cloud.google.com/go/networkconnectivity v1.6.0/go.mod h1:OJOoEXW+0LAxHh89nXd64uGG+FbQoeH8DtxCHVOMlaM= -cloud.google.com/go/networkconnectivity v1.7.0/go.mod 
h1:RMuSbkdbPwNMQjB5HBWD5MpTBnNm39iAVpC3TmsExt8= -cloud.google.com/go/networkconnectivity v1.10.0/go.mod h1:UP4O4sWXJG13AqrTdQCD9TnLGEbtNRqjuaaA7bNjF5E= -cloud.google.com/go/networkmanagement v1.4.0/go.mod h1:Q9mdLLRn60AsOrPc8rs8iNV6OHXaGcDdsIQe1ohekq8= -cloud.google.com/go/networkmanagement v1.5.0/go.mod h1:ZnOeZ/evzUdUsnvRt792H0uYEnHQEMaz+REhhzJRcf4= -cloud.google.com/go/networkmanagement v1.6.0/go.mod h1:5pKPqyXjB/sgtvB5xqOemumoQNB7y95Q7S+4rjSOPYY= -cloud.google.com/go/networksecurity v0.5.0/go.mod h1:xS6fOCoqpVC5zx15Z/MqkfDwH4+m/61A3ODiDV1xmiQ= -cloud.google.com/go/networksecurity v0.6.0/go.mod h1:Q5fjhTr9WMI5mbpRYEbiexTzROf7ZbDzvzCrNl14nyU= -cloud.google.com/go/networksecurity v0.7.0/go.mod h1:mAnzoxx/8TBSyXEeESMy9OOYwo1v+gZ5eMRnsT5bC8k= -cloud.google.com/go/notebooks v1.2.0/go.mod h1:9+wtppMfVPUeJ8fIWPOq1UnATHISkGXGqTkxeieQ6UY= -cloud.google.com/go/notebooks v1.3.0/go.mod h1:bFR5lj07DtCPC7YAAJ//vHskFBxA5JzYlH68kXVdk34= -cloud.google.com/go/notebooks v1.4.0/go.mod h1:4QPMngcwmgb6uw7Po99B2xv5ufVoIQ7nOGDyL4P8AgA= -cloud.google.com/go/notebooks v1.5.0/go.mod h1:q8mwhnP9aR8Hpfnrc5iN5IBhrXUy8S2vuYs+kBJ/gu0= -cloud.google.com/go/notebooks v1.7.0/go.mod h1:PVlaDGfJgj1fl1S3dUwhFMXFgfYGhYQt2164xOMONmE= -cloud.google.com/go/optimization v1.1.0/go.mod h1:5po+wfvX5AQlPznyVEZjGJTMr4+CAkJf2XSTQOOl9l4= -cloud.google.com/go/optimization v1.2.0/go.mod h1:Lr7SOHdRDENsh+WXVmQhQTrzdu9ybg0NecjHidBq6xs= -cloud.google.com/go/optimization v1.3.1/go.mod h1:IvUSefKiwd1a5p0RgHDbWCIbDFgKuEdB+fPPuP0IDLI= -cloud.google.com/go/orchestration v1.3.0/go.mod h1:Sj5tq/JpWiB//X/q3Ngwdl5K7B7Y0KZ7bfv0wL6fqVA= -cloud.google.com/go/orchestration v1.4.0/go.mod h1:6W5NLFWs2TlniBphAViZEVhrXRSMgUGDfW7vrWKvsBk= -cloud.google.com/go/orchestration v1.6.0/go.mod h1:M62Bevp7pkxStDfFfTuCOaXgaaqRAga1yKyoMtEoWPQ= -cloud.google.com/go/orgpolicy v1.4.0/go.mod h1:xrSLIV4RePWmP9P3tBl8S93lTmlAxjm06NSm2UTmKvE= -cloud.google.com/go/orgpolicy v1.5.0/go.mod h1:hZEc5q3wzwXJaKrsx5+Ewg0u1LxJ51nNFlext7Tanwc= -cloud.google.com/go/orgpolicy v1.10.0/go.mod h1:w1fo8b7rRqlXlIJbVhOMPrwVljyuW5mqssvBtU18ONc= -cloud.google.com/go/osconfig v1.7.0/go.mod h1:oVHeCeZELfJP7XLxcBGTMBvRO+1nQ5tFG9VQTmYS2Fs= -cloud.google.com/go/osconfig v1.8.0/go.mod h1:EQqZLu5w5XA7eKizepumcvWx+m8mJUhEwiPqWiZeEdg= -cloud.google.com/go/osconfig v1.9.0/go.mod h1:Yx+IeIZJ3bdWmzbQU4fxNl8xsZ4amB+dygAwFPlvnNo= -cloud.google.com/go/osconfig v1.10.0/go.mod h1:uMhCzqC5I8zfD9zDEAfvgVhDS8oIjySWh+l4WK6GnWw= -cloud.google.com/go/osconfig v1.11.0/go.mod h1:aDICxrur2ogRd9zY5ytBLV89KEgT2MKB2L/n6x1ooPw= -cloud.google.com/go/oslogin v1.4.0/go.mod h1:YdgMXWRaElXz/lDk1Na6Fh5orF7gvmJ0FGLIs9LId4E= -cloud.google.com/go/oslogin v1.5.0/go.mod h1:D260Qj11W2qx/HVF29zBg+0fd6YCSjSqLUkY/qEenQU= -cloud.google.com/go/oslogin v1.6.0/go.mod h1:zOJ1O3+dTU8WPlGEkFSh7qeHPPSoxrcMbbK1Nm2iX70= -cloud.google.com/go/oslogin v1.7.0/go.mod h1:e04SN0xO1UNJ1M5GP0vzVBFicIe4O53FOfcixIqTyXo= -cloud.google.com/go/oslogin v1.9.0/go.mod h1:HNavntnH8nzrn8JCTT5fj18FuJLFJc4NaZJtBnQtKFs= -cloud.google.com/go/phishingprotection v0.5.0/go.mod h1:Y3HZknsK9bc9dMi+oE8Bim0lczMU6hrX0UpADuMefr0= -cloud.google.com/go/phishingprotection v0.6.0/go.mod h1:9Y3LBLgy0kDTcYET8ZH3bq/7qni15yVUoAxiFxnlSUA= -cloud.google.com/go/phishingprotection v0.7.0/go.mod h1:8qJI4QKHoda/sb/7/YmMQ2omRLSLYSu9bU0EKCNI+Lk= -cloud.google.com/go/policytroubleshooter v1.3.0/go.mod h1:qy0+VwANja+kKrjlQuOzmlvscn4RNsAc0e15GGqfMxg= -cloud.google.com/go/policytroubleshooter v1.4.0/go.mod h1:DZT4BcRw3QoO8ota9xw/LKtPa8lKeCByYeKTIf/vxdE= -cloud.google.com/go/policytroubleshooter 
v1.5.0/go.mod h1:Rz1WfV+1oIpPdN2VvvuboLVRsB1Hclg3CKQ53j9l8vw= -cloud.google.com/go/privatecatalog v0.5.0/go.mod h1:XgosMUvvPyxDjAVNDYxJ7wBW8//hLDDYmnsNcMGq1K0= -cloud.google.com/go/privatecatalog v0.6.0/go.mod h1:i/fbkZR0hLN29eEWiiwue8Pb+GforiEIBnV9yrRUOKI= -cloud.google.com/go/privatecatalog v0.7.0/go.mod h1:2s5ssIFO69F5csTXcwBP7NPFTZvps26xGzvQ2PQaBYg= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.26.0/go.mod h1:QgBH3U/jdJy/ftjPhTkyXNj543Tin1pRYcdcPRnFIRI= -cloud.google.com/go/pubsub v1.27.1/go.mod h1:hQN39ymbV9geqBnfQq6Xf63yNhUAhv9CZhzp5O6qsW0= -cloud.google.com/go/pubsub v1.28.0/go.mod h1:vuXFpwaVoIPQMGXqRyUQigu/AX1S3IWugR9xznmcXX8= -cloud.google.com/go/pubsublite v1.5.0/go.mod h1:xapqNQ1CuLfGi23Yda/9l4bBCKz/wC3KIJ5gKcxveZg= -cloud.google.com/go/pubsublite v1.6.0/go.mod h1:1eFCS0U11xlOuMFV/0iBqw3zP12kddMeCbj/F3FSj9k= -cloud.google.com/go/recaptchaenterprise v1.3.1/go.mod h1:OdD+q+y4XGeAlxRaMn1Y7/GveP6zmq76byL6tjPE7d4= -cloud.google.com/go/recaptchaenterprise/v2 v2.1.0/go.mod h1:w9yVqajwroDNTfGuhmOjPDN//rZGySaf6PtFVcSCa7o= -cloud.google.com/go/recaptchaenterprise/v2 v2.2.0/go.mod h1:/Zu5jisWGeERrd5HnlS3EUGb/D335f9k51B/FVil0jk= -cloud.google.com/go/recaptchaenterprise/v2 v2.3.0/go.mod h1:O9LwGCjrhGHBQET5CA7dd5NwwNQUErSgEDit1DLNTdo= -cloud.google.com/go/recaptchaenterprise/v2 v2.4.0/go.mod h1:Am3LHfOuBstrLrNCBrlI5sbwx9LBg3te2N6hGvHn2mE= -cloud.google.com/go/recaptchaenterprise/v2 v2.5.0/go.mod h1:O8LzcHXN3rz0j+LBC91jrwI3R+1ZSZEWrfL7XHgNo9U= -cloud.google.com/go/recaptchaenterprise/v2 v2.6.0/go.mod h1:RPauz9jeLtB3JVzg6nCbe12qNoaa8pXc4d/YukAmcnA= -cloud.google.com/go/recommendationengine v0.5.0/go.mod h1:E5756pJcVFeVgaQv3WNpImkFP8a+RptV6dDLGPILjvg= -cloud.google.com/go/recommendationengine v0.6.0/go.mod h1:08mq2umu9oIqc7tDy8sx+MNJdLG0fUi3vaSVbztHgJ4= -cloud.google.com/go/recommendationengine v0.7.0/go.mod h1:1reUcE3GIu6MeBz/h5xZJqNLuuVjNg1lmWMPyjatzac= -cloud.google.com/go/recommender v1.5.0/go.mod h1:jdoeiBIVrJe9gQjwd759ecLJbxCDED4A6p+mqoqDvTg= -cloud.google.com/go/recommender v1.6.0/go.mod h1:+yETpm25mcoiECKh9DEScGzIRyDKpZ0cEhWGo+8bo+c= -cloud.google.com/go/recommender v1.7.0/go.mod h1:XLHs/W+T8olwlGOgfQenXBTbIseGclClff6lhFVe9Bs= -cloud.google.com/go/recommender v1.8.0/go.mod h1:PkjXrTT05BFKwxaUxQmtIlrtj0kph108r02ZZQ5FE70= -cloud.google.com/go/recommender v1.9.0/go.mod h1:PnSsnZY7q+VL1uax2JWkt/UegHssxjUVVCrX52CuEmQ= -cloud.google.com/go/redis v1.7.0/go.mod h1:V3x5Jq1jzUcg+UNsRvdmsfuFnit1cfe3Z/PGyq/lm4Y= -cloud.google.com/go/redis v1.8.0/go.mod h1:Fm2szCDavWzBk2cDKxrkmWBqoCiL1+Ctwq7EyqBCA/A= -cloud.google.com/go/redis v1.9.0/go.mod h1:HMYQuajvb2D0LvMgZmLDZW8V5aOC/WxstZHiy4g8OiA= -cloud.google.com/go/redis v1.10.0/go.mod h1:ThJf3mMBQtW18JzGgh41/Wld6vnDDc/F/F35UolRZPM= -cloud.google.com/go/redis v1.11.0/go.mod h1:/X6eicana+BWcUda5PpwZC48o37SiFVTFSs0fWAJ7uQ= -cloud.google.com/go/resourcemanager v1.3.0/go.mod h1:bAtrTjZQFJkiWTPDb1WBjzvc6/kifjj4QBYuKCCoqKA= -cloud.google.com/go/resourcemanager v1.4.0/go.mod h1:MwxuzkumyTX7/a3n37gmsT3py7LIXwrShilPh3P1tR0= -cloud.google.com/go/resourcemanager v1.5.0/go.mod h1:eQoXNAiAvCf5PXxWxXjhKQoTMaUSNrEfg+6qdf/wots= -cloud.google.com/go/resourcesettings v1.3.0/go.mod h1:lzew8VfESA5DQ8gdlHwMrqZs1S9V87v3oCnKCWoOuQU= 
-cloud.google.com/go/resourcesettings v1.4.0/go.mod h1:ldiH9IJpcrlC3VSuCGvjR5of/ezRrOxFtpJoJo5SmXg= -cloud.google.com/go/resourcesettings v1.5.0/go.mod h1:+xJF7QSG6undsQDfsCJyqWXyBwUoJLhetkRMDRnIoXA= -cloud.google.com/go/retail v1.8.0/go.mod h1:QblKS8waDmNUhghY2TI9O3JLlFk8jybHeV4BF19FrE4= -cloud.google.com/go/retail v1.9.0/go.mod h1:g6jb6mKuCS1QKnH/dpu7isX253absFl6iE92nHwlBUY= -cloud.google.com/go/retail v1.10.0/go.mod h1:2gDk9HsL4HMS4oZwz6daui2/jmKvqShXKQuB2RZ+cCc= -cloud.google.com/go/retail v1.11.0/go.mod h1:MBLk1NaWPmh6iVFSz9MeKG/Psyd7TAgm6y/9L2B4x9Y= -cloud.google.com/go/retail v1.12.0/go.mod h1:UMkelN/0Z8XvKymXFbD4EhFJlYKRx1FGhQkVPU5kF14= -cloud.google.com/go/run v0.2.0/go.mod h1:CNtKsTA1sDcnqqIFR3Pb5Tq0usWxJJvsWOCPldRU3Do= -cloud.google.com/go/run v0.3.0/go.mod h1:TuyY1+taHxTjrD0ZFk2iAR+xyOXEA0ztb7U3UNA0zBo= -cloud.google.com/go/run v0.8.0/go.mod h1:VniEnuBwqjigv0A7ONfQUaEItaiCRVujlMqerPPiktM= -cloud.google.com/go/scheduler v1.4.0/go.mod h1:drcJBmxF3aqZJRhmkHQ9b3uSSpQoltBPGPxGAWROx6s= -cloud.google.com/go/scheduler v1.5.0/go.mod h1:ri073ym49NW3AfT6DZi21vLZrG07GXr5p3H1KxN5QlI= -cloud.google.com/go/scheduler v1.6.0/go.mod h1:SgeKVM7MIwPn3BqtcBntpLyrIJftQISRrYB5ZtT+KOk= -cloud.google.com/go/scheduler v1.7.0/go.mod h1:jyCiBqWW956uBjjPMMuX09n3x37mtyPJegEWKxRsn44= -cloud.google.com/go/scheduler v1.8.0/go.mod h1:TCET+Y5Gp1YgHT8py4nlg2Sew8nUHMqcpousDgXJVQc= -cloud.google.com/go/secretmanager v1.6.0/go.mod h1:awVa/OXF6IiyaU1wQ34inzQNc4ISIDIrId8qE5QGgKA= -cloud.google.com/go/secretmanager v1.8.0/go.mod h1:hnVgi/bN5MYHd3Gt0SPuTPPp5ENina1/LxM+2W9U9J4= -cloud.google.com/go/secretmanager v1.9.0/go.mod h1:b71qH2l1yHmWQHt9LC80akm86mX8AL6X1MA01dW8ht4= -cloud.google.com/go/secretmanager v1.10.0/go.mod h1:MfnrdvKMPNra9aZtQFvBcvRU54hbPD8/HayQdlUgJpU= -cloud.google.com/go/security v1.5.0/go.mod h1:lgxGdyOKKjHL4YG3/YwIL2zLqMFCKs0UbQwgyZmfJl4= -cloud.google.com/go/security v1.7.0/go.mod h1:mZklORHl6Bg7CNnnjLH//0UlAlaXqiG7Lb9PsPXLfD0= -cloud.google.com/go/security v1.8.0/go.mod h1:hAQOwgmaHhztFhiQ41CjDODdWP0+AE1B3sX4OFlq+GU= -cloud.google.com/go/security v1.9.0/go.mod h1:6Ta1bO8LXI89nZnmnsZGp9lVoVWXqsVbIq/t9dzI+2Q= -cloud.google.com/go/security v1.10.0/go.mod h1:QtOMZByJVlibUT2h9afNDWRZ1G96gVywH8T5GUSb9IA= -cloud.google.com/go/security v1.12.0/go.mod h1:rV6EhrpbNHrrxqlvW0BWAIawFWq3X90SduMJdFwtLB8= -cloud.google.com/go/securitycenter v1.13.0/go.mod h1:cv5qNAqjY84FCN6Y9z28WlkKXyWsgLO832YiWwkCWcU= -cloud.google.com/go/securitycenter v1.14.0/go.mod h1:gZLAhtyKv85n52XYWt6RmeBdydyxfPeTrpToDPw4Auc= -cloud.google.com/go/securitycenter v1.15.0/go.mod h1:PeKJ0t8MoFmmXLXWm41JidyzI3PJjd8sXWaVqg43WWk= -cloud.google.com/go/securitycenter v1.16.0/go.mod h1:Q9GMaLQFUD+5ZTabrbujNWLtSLZIZF7SAR0wWECrjdk= -cloud.google.com/go/securitycenter v1.18.1/go.mod h1:0/25gAzCM/9OL9vVx4ChPeM/+DlfGQJDwBy/UC8AKK0= -cloud.google.com/go/servicecontrol v1.4.0/go.mod h1:o0hUSJ1TXJAmi/7fLJAedOovnujSEvjKCAFNXPQ1RaU= -cloud.google.com/go/servicecontrol v1.5.0/go.mod h1:qM0CnXHhyqKVuiZnGKrIurvVImCs8gmqWsDoqe9sU1s= -cloud.google.com/go/servicecontrol v1.11.0/go.mod h1:kFmTzYzTUIuZs0ycVqRHNaNhgR+UMUpw9n02l/pY+mc= -cloud.google.com/go/servicedirectory v1.4.0/go.mod h1:gH1MUaZCgtP7qQiI+F+A+OpeKF/HQWgtAddhTbhL2bs= -cloud.google.com/go/servicedirectory v1.5.0/go.mod h1:QMKFL0NUySbpZJ1UZs3oFAmdvVxhhxB6eJ/Vlp73dfg= -cloud.google.com/go/servicedirectory v1.6.0/go.mod h1:pUlbnWsLH9c13yGkxCmfumWEPjsRs1RlmJ4pqiNjVL4= -cloud.google.com/go/servicedirectory v1.7.0/go.mod h1:5p/U5oyvgYGYejufvxhgwjL8UVXjkuw7q5XcG10wx1U= -cloud.google.com/go/servicedirectory 
v1.8.0/go.mod h1:srXodfhY1GFIPvltunswqXpVxFPpZjf8nkKQT7XcXaY= -cloud.google.com/go/servicemanagement v1.4.0/go.mod h1:d8t8MDbezI7Z2R1O/wu8oTggo3BI2GKYbdG4y/SJTco= -cloud.google.com/go/servicemanagement v1.5.0/go.mod h1:XGaCRe57kfqu4+lRxaFEAuqmjzF0r+gWHjWqKqBvKFo= -cloud.google.com/go/servicemanagement v1.6.0/go.mod h1:aWns7EeeCOtGEX4OvZUWCCJONRZeFKiptqKf1D0l/Jc= -cloud.google.com/go/serviceusage v1.3.0/go.mod h1:Hya1cozXM4SeSKTAgGXgj97GlqUvF5JaoXacR1JTP/E= -cloud.google.com/go/serviceusage v1.4.0/go.mod h1:SB4yxXSaYVuUBYUml6qklyONXNLt83U0Rb+CXyhjEeU= -cloud.google.com/go/serviceusage v1.5.0/go.mod h1:w8U1JvqUqwJNPEOTQjrMHkw3IaIFLoLsPLvsE3xueec= -cloud.google.com/go/shell v1.3.0/go.mod h1:VZ9HmRjZBsjLGXusm7K5Q5lzzByZmJHf1d0IWHEN5X4= -cloud.google.com/go/shell v1.4.0/go.mod h1:HDxPzZf3GkDdhExzD/gs8Grqk+dmYcEjGShZgYa9URw= -cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+qE2f9A= -cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos= +cloud.google.com/go/spanner v1.44.0 h1:fba7k2apz4aI0BE59/kbeaJ78dPOXSz2PSuBIfe7SBM= cloud.google.com/go/spanner v1.44.0/go.mod h1:G8XIgYdOK+Fbcpbs7p2fiprDw4CaZX63whnSMLVBxjk= -cloud.google.com/go/spanner v1.45.0 h1:7VdjZ8zj4sHbDw55atp5dfY6kn1j9sam9DRNpPQhqR4= -cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M= -cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM= -cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ= -cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0= -cloud.google.com/go/speech v1.9.0/go.mod h1:xQ0jTcmnRFFM2RfX/U+rk6FQNUF6DQlydUSyoooSpco= -cloud.google.com/go/speech v1.14.1/go.mod h1:gEosVRPJ9waG7zqqnsHpYTOoAS4KouMRLDFMekpJ0J0= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= -cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= -cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s= cloud.google.com/go/storage v1.28.1 h1:F5QDG5ChchaAVQhINh24U99OWHURqrW8OmQcGKXcbgI= cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y= -cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w= -cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I= -cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4= -cloud.google.com/go/talent v1.1.0/go.mod h1:Vl4pt9jiHKvOgF9KoZo6Kob9oV4lwd/ZD5Cto54zDRw= -cloud.google.com/go/talent v1.2.0/go.mod h1:MoNF9bhFQbiJ6eFD3uSsg0uBALw4n4gaCaEjBw9zo8g= -cloud.google.com/go/talent v1.3.0/go.mod h1:CmcxwJ/PKfRgd1pBjQgU6W3YBwiewmUzQYH5HHmSCmM= -cloud.google.com/go/talent v1.4.0/go.mod h1:ezFtAgVuRf8jRsvyE6EwmbTK5LKciD4KVnHuDEFmOOA= -cloud.google.com/go/talent v1.5.0/go.mod 
h1:G+ODMj9bsasAEJkQSzO2uHQWXHHXUomArjWQQYkqK6c= -cloud.google.com/go/texttospeech v1.4.0/go.mod h1:FX8HQHA6sEpJ7rCMSfXuzBcysDAuWusNNNvN9FELDd8= -cloud.google.com/go/texttospeech v1.5.0/go.mod h1:oKPLhR4n4ZdQqWKURdwxMy0uiTS1xU161C8W57Wkea4= -cloud.google.com/go/texttospeech v1.6.0/go.mod h1:YmwmFT8pj1aBblQOI3TfKmwibnsfvhIBzPXcW4EBovc= -cloud.google.com/go/tpu v1.3.0/go.mod h1:aJIManG0o20tfDQlRIej44FcwGGl/cD0oiRyMKG19IQ= -cloud.google.com/go/tpu v1.4.0/go.mod h1:mjZaX8p0VBgllCzF6wcU2ovUXN9TONFLd7iz227X2Xg= -cloud.google.com/go/tpu v1.5.0/go.mod h1:8zVo1rYDFuW2l4yZVY0R0fb/v44xLh3llq7RuV61fPM= -cloud.google.com/go/trace v1.3.0/go.mod h1:FFUE83d9Ca57C+K8rDl/Ih8LwOzWIV1krKgxg6N0G28= -cloud.google.com/go/trace v1.4.0/go.mod h1:UG0v8UBqzusp+z63o7FK74SdFE+AXpCLdFb1rshXG+Y= -cloud.google.com/go/trace v1.8.0/go.mod h1:zH7vcsbAhklH8hWFig58HvxcxyQbaIqMarMg9hn5ECA= -cloud.google.com/go/translate v1.3.0/go.mod h1:gzMUwRjvOqj5i69y/LYLd8RrNQk+hOmIXTi9+nb3Djs= -cloud.google.com/go/translate v1.4.0/go.mod h1:06Dn/ppvLD6WvA5Rhdp029IX2Mi3Mn7fpMRLPvXT5Wg= -cloud.google.com/go/translate v1.6.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= -cloud.google.com/go/video v1.8.0/go.mod h1:sTzKFc0bUSByE8Yoh8X0mn8bMymItVGPfTuUBUyRgxk= -cloud.google.com/go/video v1.9.0/go.mod h1:0RhNKFRF5v92f8dQt0yhaHrEuH95m068JYOvLZYnJSw= -cloud.google.com/go/video v1.13.0/go.mod h1:ulzkYlYgCp15N2AokzKjy7MQ9ejuynOJdf1tR5lGthk= -cloud.google.com/go/videointelligence v1.6.0/go.mod h1:w0DIDlVRKtwPCn/C4iwZIJdvC69yInhW0cfi+p546uU= -cloud.google.com/go/videointelligence v1.7.0/go.mod h1:k8pI/1wAhjznARtVT9U1llUaFNPh7muw8QyOUpavru4= -cloud.google.com/go/videointelligence v1.8.0/go.mod h1:dIcCn4gVDdS7yte/w+koiXn5dWVplOZkE+xwG9FgK+M= -cloud.google.com/go/videointelligence v1.9.0/go.mod h1:29lVRMPDYHikk3v8EdPSaL8Ku+eMzDljjuvRs105XoU= -cloud.google.com/go/videointelligence v1.10.0/go.mod h1:LHZngX1liVtUhZvi2uNS0VQuOzNi2TkY1OakiuoUOjU= -cloud.google.com/go/vision v1.2.0/go.mod h1:SmNwgObm5DpFBme2xpyOyasvBc1aPdjvMk2bBk0tKD0= -cloud.google.com/go/vision/v2 v2.2.0/go.mod h1:uCdV4PpN1S0jyCyq8sIM42v2Y6zOLkZs+4R9LrGYwFo= -cloud.google.com/go/vision/v2 v2.3.0/go.mod h1:UO61abBx9QRMFkNBbf1D8B1LXdS2cGiiCRx0vSpZoUo= -cloud.google.com/go/vision/v2 v2.4.0/go.mod h1:VtI579ll9RpVTrdKdkMzckdnwMyX2JILb+MhPqRbPsY= -cloud.google.com/go/vision/v2 v2.5.0/go.mod h1:MmaezXOOE+IWa+cS7OhRRLK2cNv1ZL98zhqFFZaaH2E= -cloud.google.com/go/vision/v2 v2.6.0/go.mod h1:158Hes0MvOS9Z/bDMSFpjwsUrZ5fPrdwuyyvKSGAGMY= -cloud.google.com/go/vmmigration v1.2.0/go.mod h1:IRf0o7myyWFSmVR1ItrBSFLFD/rJkfDCUTO4vLlJvsE= -cloud.google.com/go/vmmigration v1.3.0/go.mod h1:oGJ6ZgGPQOFdjHuocGcLqX4lc98YQ7Ygq8YQwHh9A7g= -cloud.google.com/go/vmmigration v1.5.0/go.mod h1:E4YQ8q7/4W9gobHjQg4JJSgXXSgY21nA5r8swQV+Xxc= -cloud.google.com/go/vmwareengine v0.1.0/go.mod h1:RsdNEf/8UDvKllXhMz5J40XxDrNJNN4sagiox+OI208= -cloud.google.com/go/vmwareengine v0.2.2/go.mod h1:sKdctNJxb3KLZkE/6Oui94iw/xs9PRNC2wnNLXsHvH8= -cloud.google.com/go/vpcaccess v1.4.0/go.mod h1:aQHVbTWDYUR1EbTApSVvMq1EnT57ppDmQzZ3imqIk4w= -cloud.google.com/go/vpcaccess v1.5.0/go.mod h1:drmg4HLk9NkZpGfCmZ3Tz0Bwnm2+DKqViEpeEpOq0m8= -cloud.google.com/go/vpcaccess v1.6.0/go.mod h1:wX2ILaNhe7TlVa4vC5xce1bCnqE3AeH27RV31lnmZes= -cloud.google.com/go/webrisk v1.4.0/go.mod h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xXZmFiHmGE= -cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg= -cloud.google.com/go/webrisk v1.6.0/go.mod h1:65sW9V9rOosnc9ZY7A7jsy1zoHS5W9IAXv6dGqhMQMc= -cloud.google.com/go/webrisk v1.7.0/go.mod 
h1:mVMHgEYH0r337nmt1JyLthzMr6YxwN1aAIEc2fTcq7A= -cloud.google.com/go/webrisk v1.8.0/go.mod h1:oJPDuamzHXgUc+b8SiHRcVInZQuybnvEW72PqTc7sSg= -cloud.google.com/go/websecurityscanner v1.3.0/go.mod h1:uImdKm2wyeXQevQJXeh8Uun/Ym1VqworNDlBXQevGMo= -cloud.google.com/go/websecurityscanner v1.4.0/go.mod h1:ebit/Fp0a+FWu5j4JOmJEV8S8CzdTkAS77oDsiSqYWQ= -cloud.google.com/go/websecurityscanner v1.5.0/go.mod h1:Y6xdCPy81yi0SQnDY1xdNTNpfY1oAgXUlcfN3B3eSng= -cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0= -cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M= -cloud.google.com/go/workflows v1.8.0/go.mod h1:ysGhmEajwZxGn1OhGOGKsTXc5PyxOc0vfKf5Af+to4M= -cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA= -cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcPALq2CxzdePw= code.cloudfoundry.org/gofileutils v0.0.0-20170111115228-4d0c80011a0f h1:UrKzEwTgeiff9vxdrfdqxibzpWjxLnuXDI5m6z3GJAk= code.cloudfoundry.org/gofileutils v0.0.0-20170111115228-4d0c80011a0f/go.mod h1:sk5LnIjB/nIEU7yP5sDQExVm62wu0pBh3yrElngUisI= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8= -git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc= -github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= -github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= -github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= -github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= -github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= -github.com/AdaLogics/go-fuzz-headers v0.0.0-20210715213245-6c3934b029d8/go.mod h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg= -github.com/AdaLogics/go-fuzz-headers v0.0.0-20221206110420-d395f97c4830/go.mod h1:VzwV+t+dZ9j/H867F1M2ziD+yLHtB46oM35FxxMJ4d0= -github.com/AdaLogics/go-fuzz-headers v0.0.0-20230106234847-43070de90fa1/go.mod h1:VzwV+t+dZ9j/H867F1M2ziD+yLHtB46oM35FxxMJ4d0= -github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20221215162035-5330a85ea652/go.mod h1:OahwfttHWG6eJ0clwcfBAHoDI6X/LV/15hx/wlMZSrU= github.com/Azure/azure-pipeline-go v0.2.3 h1:7U9HBg1JFK3jHl5qmo4CTZKFTVgMwdFHMVtCdfBE21U= github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v44.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go v56.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v67.2.0+incompatible h1:Uu/Ww6ernvPTrpq31kITVTIm/I5jlJ1wjtEH/bmSB2k= github.com/Azure/azure-sdk-for-go v67.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 h1:8kDqDngH+DmVBiCtIjCFTGa7MBnsIOkF9IccInFEbjk= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= 
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.1 h1:gVXuXcWd1i4C2Ruxe321aU+IKGaStvGB/S90PUPB/W8= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.1/go.mod h1:DffdKW9RFqa5VgmsjUOsS7UE7eiA5iAvYUs63bhKQ0M= github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0/go.mod h1:OQeznEEkTZ9OrhHJoDD8ZDq51FHgXjqtP9z6bEwBq9U= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.1 h1:T8quHYlUGyb/oqtSTwqlCr1ilJHrDv+ZtpSfo+hm1BU= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.1/go.mod h1:gLa1CL2RNE4s7M3yopJ/p0iq5DdY6Yv5ZUt9MTRZOQM= github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v4 v4.2.1 h1:UPeCRD+XY7QlaGQte2EVI2iOcWvUYA2XY8w5T/8v0NQ= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v4 v4.2.1/go.mod h1:oGV6NlB0cvi1ZbYRR2UN44QHxWFyGk+iylgD0qaMXjA= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2 h1:mLY+pNLjCUeKhgnAJWAKhEUQM+RJQo2H1fuGSw1Ky1E= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/managementgroups/armmanagementgroups v1.0.0 h1:pPvTJ1dY0sA35JOeFq6TsY2xj6Z85Yo23Pj4wCCvu4o= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/msi/armmsi v1.1.0 h1:Q707jfTFqfunSnh73YkCBDXR3GQJKno3chPRxXw//ho= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/msi/armmsi v1.1.0/go.mod h1:vjoxsjVnPwhjHZw4PuuhpgYlcxWl5tyNedLHUl0ulFA= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 h1:+5VZ72z0Qan5Bog5C+ZkgSqUbeVUd9wgtHOrIKuc5b8= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v3 v3.0.1 h1:H3g2mkmu105ON0c/Gqx3Bm+bzoIijLom8LmV9Gjn7X0= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v4 v4.1.0 h1:Vjq3Uy3JAU1DTxbA+uX6BegIhgO2pyFltbfbmDa9KdI= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v4 v4.1.0/go.mod h1:Q3u+T/qw3Kb1Wf3DFKiFwEZlyaAyPb4yBgWm9wq7yh8= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.0.0 h1:lMW1lD/17LUA5z1XTURo7LcVG2ICBPlyMHjIUrcFZNQ= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/msi/armmsi v1.0.0 h1:ZOt3s8LxEoRGgdD/k7Co4wGAWKmO4+jdPRCRBa8Rzc0= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/msi/armmsi v1.0.0/go.mod h1:ZJWUTTEMZLTJI4PPI6vuv/OCEs9YjEX9EqjCnLJ8afA= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/network/armnetwork v1.0.0 h1:nBy98uKOIfun5z6wx6jwWLrULcM0+cjBalBFZlEZ7CA= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.1.1 h1:7CBQ+Ei8SP2c6ydQTGCCrS35bDxgTMfoP2miAwK++OU= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.1.1/go.mod h1:c/wcGeGx5FUPbM/JltUYHZcKmigwyVLJlDq+4HdtXaw= -github.com/Azure/azure-storage-blob-go v0.15.0 h1:rXtgp8tN1p29GvpGgfJetavIG0V7OgcSXPpwp3tx6qk= -github.com/Azure/azure-storage-blob-go v0.15.0/go.mod h1:vbjsVbX0dlxnRc4FFMPsS9BsJWPcne7GB7onqlPvz58= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 
h1:ECsQtyERDVz3NP3kvDOTLvbQhqWp/x9EsGKtb4ogUr8= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0/go.mod h1:s1tW/At+xHqjNFvWU4G0c0Qv33KOhvbGNj0RCTQDV8s= +github.com/Azure/azure-storage-blob-go v0.14.0 h1:1BCg74AmVdYwO3dlKwtFU1V0wU2PZdREkXvAmZJRUlM= +github.com/Azure/azure-storage-blob-go v0.14.0/go.mod h1:SMqIBi+SuiQH32bvyjngEewEeXoPfKMgWlBDaYf6fck= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= -github.com/Azure/go-ansiterm v0.0.0-20210608223527-2377c96fe795/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest v0.11.0/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= -github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= github.com/Azure/go-autorest/autorest v0.11.24/go.mod h1:G6kyRlFnTuSbEYkQGawPfsCswgme4iYf6rfSKUDzbCc= -github.com/Azure/go-autorest/autorest v0.11.29 h1:I4+HL/JDvErx2LjyzaVxllw2lRDB5/BT2Bm4g20iqYw= -github.com/Azure/go-autorest/autorest v0.11.29/go.mod h1:ZtEzC4Jy2JDrZLxvWs8LrBWEBycl1hbT1eknI8MtfAs= +github.com/Azure/go-autorest/autorest v0.11.28 h1:ndAExarwr5Y+GaHE6VCaY1kyS/HwwGGyuimVhWsHOEM= +github.com/Azure/go-autorest/autorest v0.11.28/go.mod h1:MrkzG3Y3AH668QyF9KRk5neJnGgmhQ6krbhR8Q5eMvA= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= -github.com/Azure/go-autorest/autorest/adal v0.9.22 h1:/GblQdIudfEM3AWWZ0mrYJQSd7JS4S/Mbzh6F0ov0Xc= -github.com/Azure/go-autorest/autorest/adal v0.9.22/go.mod h1:XuAbAEUv2Tta//+voMI038TrJBqjKam0me7qR+L8Cmk= +github.com/Azure/go-autorest/autorest/adal v0.9.20 h1:gJ3E98kMpFB1MFqQCvA1yFab8vthOeD4VlFRQULxahg= +github.com/Azure/go-autorest/autorest/adal v0.9.20/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= github.com/Azure/go-autorest/autorest/azure/auth v0.5.0/go.mod h1:QRTvSZQpxqm8mSErhnbI+tANIBAKP7B+UIE2z4ypUO0= github.com/Azure/go-autorest/autorest/azure/auth v0.5.12 h1:wkAZRgT/pn8HhFyzfe9UnqOjJYqlembgCTi72Bm/xKk= github.com/Azure/go-autorest/autorest/azure/auth v0.5.12/go.mod 
h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= @@ -616,36 +126,29 @@ github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZ github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c h1:/IBSNwUN8+eKzUzbJPqhK839ygXJ82sde8x3ogr6R28= github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= -github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= -github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8= -github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= -github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 h1:OBhqkivkhkMqLPymWEppkm7vgPQY2XsHoEkaMQ0AdZY= -github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0/go.mod h1:kgDmCTgBzIEPFElEF+FK0SdjAor06dRq2Go927dnQ6o= +github.com/AzureAD/microsoft-authentication-library-for-go v0.8.1 h1:oPdPEZFSbl7oSPEAIPMPBMUmiL+mqgzBJwM/9qYcwNg= +github.com/AzureAD/microsoft-authentication-library-for-go v0.8.1/go.mod h1:4qFor3D/HDsvBME35Xy9rwW9DecL+M2sNw1ybjPtwA0= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= -github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.2.0 h1:Rt8g24XnyGTyglgET/PRUNlrUeu9F5L+7FilkXfZgs0= +github.com/BurntSushi/toml v1.2.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/Jeffail/gabs v1.1.1 h1:V0uzR08Hj22EX8+8QMhyI9sX2hwRu+/RJhJUmnwda/E= github.com/Jeffail/gabs v1.1.1/go.mod h1:6xMvQMK4k33lb7GUUpaAPh6nKMmemQeg5d4gn7/bOXc= -github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= 
-github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60= github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= -github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= @@ -654,11 +157,10 @@ github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugX github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.4.17/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.5.1/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= github.com/Microsoft/hcsshim v0.8.7-0.20190325164909-8abdbb8205e4/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ= @@ -666,15 +168,9 @@ github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg3 github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg= github.com/Microsoft/hcsshim v0.8.15/go.mod h1:x38A4YbHbdxJtc0sF6oIz+RG0npwSCAvn69iY6URG00= github.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+VxGOoXdC600= -github.com/Microsoft/hcsshim v0.8.20/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= github.com/Microsoft/hcsshim v0.8.21/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= -github.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg= -github.com/Microsoft/hcsshim v0.9.2/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= -github.com/Microsoft/hcsshim v0.9.3/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= -github.com/Microsoft/hcsshim v0.9.4/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= -github.com/Microsoft/hcsshim v0.9.6/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= -github.com/Microsoft/hcsshim v0.10.0-rc.7 h1:HBytQPxcv8Oy4244zbQbe6hnOnx544eL5QPUqhJldz8= -github.com/Microsoft/hcsshim v0.10.0-rc.7/go.mod h1:ILuwjA+kNW+MrN/w5un7n3mTqkwsFu4Bp05/okFUZlE= +github.com/Microsoft/hcsshim v0.9.0 h1:BBgYMxl5YZDZVIijz02AlDINpYZOzQqRNCl9CZM13vk= +github.com/Microsoft/hcsshim v0.9.0/go.mod 
h1:VBJWdC71NSWPlEo7lwde1aL21748J8B6Sdgno7NqEGE= github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU= github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= @@ -683,13 +179,14 @@ github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMo github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= github.com/ProtonMail/go-crypto v0.0.0-20220824120805-4b6e5c587895 h1:NsReiLpErIPzRrnogAXYwSoU7txA977LjDGrbkewJbg= github.com/ProtonMail/go-crypto v0.0.0-20220824120805-4b6e5c587895/go.mod h1:UBYPn8k0D56RtnR8RFQMjmh4KrZzWJ5o7Z9SYjossQ8= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/SAP/go-hdb v0.14.1 h1:hkw4ozGZ/i4eak7ZuGkY5e0hxiXFdNUBNhr4AvZVNFE= github.com/SAP/go-hdb v0.14.1/go.mod h1:7fdQLVC2lER3urZLjZCm0AuMQfApof92n3aylBPEkMo= @@ -700,23 +197,16 @@ github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af h1:DBNMBMuMiWYu0b+8KM github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af/go.mod h1:5Jv4cbFiHJMsVxt52+i0Ha45fjshj6wxYr1r19tB9bw= github.com/aerospike/aerospike-client-go/v5 v5.6.0 h1:tRxcUq0HY8fFPQEzF3EgrknF+w1xFO0YDfUb9Nm8yRI= github.com/aerospike/aerospike-client-go/v5 v5.6.0/go.mod h1:rJ/KpmClE7kiBPfvAPrGw9WuNOiz8v2uKbQaUyYPXtI= -github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= -github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= -github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= -github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= -github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= -github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod 
h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= -github.com/alexflint/go-filemutex v1.1.0/go.mod h1:7P4iRhttt/nUvUOrYIhcpMzv2G6CY9UnI16Z+UJqRyk= -github.com/alexflint/go-filemutex v1.2.0/go.mod h1:mYyQSWvw9Tx2/H2n9qXPb52tTYfE0pZAWcBq5mK025c= -github.com/aliyun/alibaba-cloud-sdk-go v1.62.301 h1:8mgvCpqsv3mQAcqZ/baAaMGUBj5J6MKMhxLd+K8L27Q= -github.com/aliyun/alibaba-cloud-sdk-go v1.62.301/go.mod h1:Api2AkmMgGaSUAhmk76oaFObkoeCPc/bKAqcyplPODs= +github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190620160927-9418d7b0cd0f/go.mod h1:myCDvQSzCW+wB1WAlocEru4wMGJxy+vlxHdhegi1CDQ= +github.com/aliyun/alibaba-cloud-sdk-go v1.62.146 h1:zAH0YjWzonbKHvNkfbxqTmX51uHbkQYu+jJah2IAiCA= +github.com/aliyun/alibaba-cloud-sdk-go v1.62.146/go.mod h1:Api2AkmMgGaSUAhmk76oaFObkoeCPc/bKAqcyplPODs= github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190307165228-86c17b95fcd5 h1:nWDRPCyCltiTsANwC/n3QZH7Vww33Npq9MKqlwRzI/c= github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190307165228-86c17b95fcd5/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= @@ -724,14 +214,8 @@ github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220418222510-f25a4f6275ed/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= -github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= -github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 h1:q4dksr6ICHXqG5hm0ZW5IHyeEJXoIJSOZeBLmWPNeIQ= -github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs= -github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= -github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= -github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= -github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/apache/arrow/go/arrow v0.0.0-20210818145353-234c94e4ce64 h1:ZsPrlYPY/v1PR7pGrmYD/rq5BFiSPalH8i9eEkSfnnI= +github.com/apache/arrow/go/arrow v0.0.0-20210818145353-234c94e4ce64/go.mod h1:2qMFB56yOP3KzkB3PbYZ4AlUFg3a88F67TIx5lB/WwY= github.com/apple/foundationdb/bindings/go v0.0.0-20190411004307-cd5c9d91fad2 h1:VoHKYIXEQU5LWoambPBOvYxyLqZYHuj+rj5DVnMUc3k= github.com/apple/foundationdb/bindings/go v0.0.0-20190411004307-cd5c9d91fad2/go.mod h1:OMVSB21p9+xQUIqlGizHPZfjK+SHws1ht+ZytVDoz9U= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= @@ -740,12 +224,12 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod 
h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg= github.com/armon/go-metrics v0.3.0/go.mod h1:zXjbSimjXTd7vOpY8B0/2LpvNvDoXBuplAD+gJD3GYs= github.com/armon/go-metrics v0.3.9/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= @@ -754,74 +238,41 @@ github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGL github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.25.41/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.34.0/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.30.27/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= -github.com/aws/aws-sdk-go v1.43.9/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/aws/aws-sdk-go v1.43.16/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/aws/aws-sdk-go v1.44.268 h1:WoK20tlAvsvQzTcE6TajoprbXmTbcud6MjhErL4P/38= -github.com/aws/aws-sdk-go v1.44.268/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= -github.com/aws/aws-sdk-go-v2 v1.16.16/go.mod h1:SwiyXi/1zTUZ6KIAmLK5V5ll8SiURNUYOqTerZPaF9k= -github.com/aws/aws-sdk-go-v2 v1.17.7 h1:CLSjnhJSTSogvqUGhIC6LqFKATMRexcxLZ0i/Nzk9Eg= -github.com/aws/aws-sdk-go-v2 v1.17.7/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8/go.mod h1:JTnlBSot91steJeti4ryyu/tLd4Sk84O5W22L7O2EQU= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 h1:dK82zF6kkPeCo8J1e+tGx4JdvDIQzj7ygIoLg8WMuGs= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10/go.mod h1:VeTZetY5KRJLuD/7fkQXMU6Mw7H5m/KP2J5Iy9osMno= -github.com/aws/aws-sdk-go-v2/config v1.17.7/go.mod h1:dN2gja/QXxFF15hQreyrqYhLBaQo1d9ZKe/v/uplQoI= -github.com/aws/aws-sdk-go-v2/config v1.18.19 h1:AqFK6zFNtq4i1EYu+eC7lcKHYnZagMn6SW171la0bGw= -github.com/aws/aws-sdk-go-v2/config v1.18.19/go.mod h1:XvTmGMY8d52ougvakOv1RpiTLPz9dlG/OQHsKU/cMmY= -github.com/aws/aws-sdk-go-v2/credentials v1.12.20/go.mod h1:UKY5HyIux08bbNA7Blv4PcXQ8cTkGh7ghHMFklaviR4= -github.com/aws/aws-sdk-go-v2/credentials v1.13.18 h1:EQMdtHwz0ILTW1hoP+EwuWhwCG1hD6l3+RWFQABET4c= -github.com/aws/aws-sdk-go-v2/credentials v1.13.18/go.mod h1:vnwlwjIe+3XJPBYKu1et30ZPABG3VaXJYr8ryohpIyM= 
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.17/go.mod h1:yIkQcCDYNsZfXpd5UX2Cy+sWA1jPgIhGTw9cOBzfVnQ= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1 h1:gt57MN3liKiyGopcqgNzJb2+d9MJaKT/q1OksHNXVE4= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1/go.mod h1:lfUx8puBRdM5lVVMQlwt2v+ofiG/X6Ms+dy0UkG/kXw= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33/go.mod h1:84XgODVR8uRhmOnUkKGUZKqIMxmjmLOR8Uyp7G/TPwc= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 h1:E3Y+OfzOK1+rmRo/K2G0ml8Vs+Xqk0kOnf4nS0kUtBc= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59/go.mod h1:1M4PLSBUVfBI0aP+C9XI7SM6kZPCGYyI6izWz0TGprE= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23/go.mod h1:2DFxAQ9pfIRy0imBCJv+vZ2X6RKxves6fbnEuSry6b4= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 h1:sJLYcS+eZn5EeNINGHSCRAwUJMFVqklwkH36Vbyai7M= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31/go.mod h1:QT0BqUvX1Bh2ABdTGnjqEjvjzrCfIniM9Sc8zn9Yndo= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17/go.mod h1:pRwaTYCJemADaqCbUAxltMoHKata7hmB5PjEXeu0kfg= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 h1:1mnRASEKnkqsntcxHaysxwgVoUUp5dkiB+l3llKnqyg= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25/go.mod h1:zBHOPwhBc3FlQjQJE/D3IfPWiWaQmT06Vq9aNukDo0k= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.24/go.mod h1:jULHjqqjDlbyTa7pfM7WICATnOv+iOhjletM3N0Xbu8= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 h1:p5luUImdIqywn6JpQsW3tq5GNOxKmOnEpybzPx+d1lk= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32/go.mod h1:XGhIBZDEgfqmFIugclZ6FU7v75nHhBDtzuB4xB/tEi4= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14/go.mod h1:AyGgqiKv9ECM6IZeNQtdT8NnMvUb3/2wokeq2Fgryto= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23 h1:DWYZIsyqagnWL00f8M/SOr9fN063OEQWn9LLTbdYXsk= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23/go.mod h1:uIiFgURZbACBEQJfqTZPb/jxO7R+9LeoHUFudtIdeQI= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.9/go.mod h1:a9j48l6yL5XINLHLcOKInjdvknN+vWqPBxqeIDw7ktw= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 h1:y2+VQzC6Zh2ojtV2LoC0MNwHWc6qXv/j2vrQtlftkdA= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11/go.mod h1:iV4q2hsqtNECrfmlXyord9u4zyuFEJX9eLgLpSPzWA8= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18/go.mod h1:NS55eQ4YixUJPTC+INxi2/jCqe1y2Uw3rnh9wEOVJxY= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 h1:CeuSeq/8FnYpPtnuIeLQEEvDv9zUjneuYi8EghMBdwQ= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26/go.mod h1:2UqAAwMUXKeRkAHIlDJqvMVgOWkUi/AUXPk/YIe+Dg4= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.17/go.mod h1:4nYOrY41Lrbk2170/BGkcJKBhws9Pfn8MG3aGqjjeFI= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 h1:5LHn8JQ0qvjD9L9JhMtylnkcw7j05GDZqM9Oin6hpr0= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25/go.mod h1:/95IA+0lMnzW6XzqYJRpjjsAbKEORVeO0anQqjd2CNU= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17/go.mod h1:YqMdV+gEKCQ59NrB7rzrJdALeBIsYiVi8Inj3+KcqHI= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 h1:e2ooMhpYGhDnBfSvIyusvAwX7KexuZaHbQY2Dyei7VU= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0/go.mod h1:bh2E0CXKZsQN+faiKVqC40vfNMAWheoULBCnEgO9K+8= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11/go.mod h1:fmgDANqTUCxciViKl9hb/zD5LFbvPINFRgWhDbR+vZo= -github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 
h1:B1G2pSPvbAtQjilPq+Y7jLIzCOwKzuVEl+aBBaNG0AQ= -github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0/go.mod h1:ncltU6n4Nof5uJttDtcNQ537uNuwYqsZZQcpkd2/GUQ= -github.com/aws/aws-sdk-go-v2/service/sso v1.11.23/go.mod h1:/w0eg9IhFGjGyyncHIQrXtU8wvNsTJOP0R6PPj0wf80= -github.com/aws/aws-sdk-go-v2/service/sso v1.12.6 h1:5V7DWLBd7wTELVz5bPpwzYy/sikk0gsgZfj40X+l5OI= -github.com/aws/aws-sdk-go-v2/service/sso v1.12.6/go.mod h1:Y1VOmit/Fn6Tz1uFAeCO6Q7M2fmfXSCLeL5INVYsLuY= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.13.5/go.mod h1:csZuQY65DAdFBt1oIjO5hhBR49kQqop4+lcuCjf2arA= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 h1:B8cauxOH1W1v7rd8RdI/MWnoR4Ze0wIHWrb90qczxj4= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6/go.mod h1:Lh/bc9XUf8CfOY6Jp5aIkQtN+j1mc+nExc+KXj9jx2s= -github.com/aws/aws-sdk-go-v2/service/sts v1.16.19/go.mod h1:h4J3oPZQbxLhzGnk+j9dfYHi5qIOVJ5kczZd658/ydM= -github.com/aws/aws-sdk-go-v2/service/sts v1.18.7 h1:bWNgNdRko2x6gqa0blfATqAZKZokPIeM1vfmQt2pnvM= -github.com/aws/aws-sdk-go-v2/service/sts v1.18.7/go.mod h1:JuTnSoeePXmMVe9G8NcjjwgOKEfZ4cOjMuT2IBT/2eI= -github.com/aws/smithy-go v1.13.3/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= -github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8= -github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/aws/aws-sdk-go v1.36.29/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/aws/aws-sdk-go v1.44.191 h1:GnbkalCx/AgobaorDMFCa248acmk+91+aHBQOk7ljzU= +github.com/aws/aws-sdk-go v1.44.191/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go-v2 v1.8.0 h1:HcN6yDnHV9S7D69E7To0aUppJhiJNEzQSNcUxc7r3qo= +github.com/aws/aws-sdk-go-v2 v1.8.0/go.mod h1:xEFuWz+3TYdlPRuo+CqATbeDWIWyaT5uAPwPaWtgse0= +github.com/aws/aws-sdk-go-v2/config v1.6.0 h1:rtoCnNObhVm7me+v9sA2aY+NtHNZjjWWC3ifXVci+wE= +github.com/aws/aws-sdk-go-v2/config v1.6.0/go.mod h1:TNtBVmka80lRPk5+S9ZqVfFszOQAGJJ9KbT3EM3CHNU= +github.com/aws/aws-sdk-go-v2/credentials v1.3.2 h1:Uud/fZzm0lqqhE8kvXYJFAJ3PGnagKoUcvHq1hXfBZw= +github.com/aws/aws-sdk-go-v2/credentials v1.3.2/go.mod h1:PACKuTJdt6AlXvEq8rFI4eDmoqDFC5DpVKQbWysaDgM= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.4.0 h1:SGqDJun6tydgsSIFxv9+EYBJVqVUwg2QMJp6PbNq8C8= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.4.0/go.mod h1:Mj/U8OpDbcVcoctrYwA2bak8k/HFPdcLzI/vaiXMwuM= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.4.0 h1:Iqp2aHeRF3kaaNuDS82bHBzER285NM6lLPAgsxHCR2A= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.4.0/go.mod h1:eHwXu2+uE/T6gpnYWwBwqoeqRf9IXyCcolyOWDRAErQ= +github.com/aws/aws-sdk-go-v2/internal/ini v1.2.0 h1:xu45foJnwMwBqSkIMKyJP9kbyHi5hdhZ/WiJ7D2sHZ0= +github.com/aws/aws-sdk-go-v2/internal/ini v1.2.0/go.mod h1:Q5jATQc+f1MfZp3PDMhn6ry18hGvE0i8yvbXoKbnZaE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.2.2 h1:YcGVEqLQGHDa81776C3daai6ZkkRGf/8RAQ07hV0QcU= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.2.2/go.mod h1:EASdTcM1lGhUe1/p4gkojHwlGJkeoRjjr1sRCzup3Is= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.2 h1:Xv1rGYgsRRn0xw9JFNnfpBMZam54PrWpC4rJOJ9koA8= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.2/go.mod h1:NXmNI41bdEsJMrD0v9rUvbGCB5GwdBEpKvUvIY3vTFg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.5.2 h1:ewIpdVz12MDinJJB/nu1uUiFIWFnvtd3iV7cEW7lR+M= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.5.2/go.mod h1:QuL2Ym8BkrLmN4lUofXYq6000/i5jPjosCNK//t6gak= 
+github.com/aws/aws-sdk-go-v2/service/s3 v1.12.0 h1:cxZbzTYXgiQrZ6u2/RJZAkkgZssqYOdydvJPBgIHlsM= +github.com/aws/aws-sdk-go-v2/service/s3 v1.12.0/go.mod h1:6J++A5xpo7QDsIeSqPK4UHqMSyPOCopa+zKtqAMhqVQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.3.2 h1:b+U3WrF9ON3f32FH19geqmiod4uKcMv/q+wosQjjyyM= +github.com/aws/aws-sdk-go-v2/service/sso v1.3.2/go.mod h1:J21I6kF+d/6XHVk7kp/cx9YVD2TMD2TbLwtRGVcinXo= +github.com/aws/aws-sdk-go-v2/service/sts v1.6.1 h1:1Pls85C5CFjhE3aH+h85/hyAk89kQNlAWlEQtIkaFyc= +github.com/aws/aws-sdk-go-v2/service/sts v1.6.1/go.mod h1:hLZ/AnkIKHLuPGjEiyghNEdvJ2PP0MgOxcmv9EBJ4xs= +github.com/aws/smithy-go v1.7.0 h1:+cLHMRrDZvQ4wk+KuQ9yH6eEg6KZEJ9RI2IkDqnygCg= +github.com/aws/smithy-go v1.7.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= github.com/axiomhq/hyperloglog v0.0.0-20220105174342-98591331716a h1:eqjiAL3qooftPm8b9C1GsSSRcmlw7iOva8vdBTmV2PY= github.com/axiomhq/hyperloglog v0.0.0-20220105174342-98591331716a/go.mod h1:2stgcRjl6QmW+gU2h5E7BQXg4HU0gzxKWDuT5HviN9s= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f h1:ZNv7On9kyUzm7fvRZumSyy/IUiSC7AzL0I1jKKtwooA= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= -github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -835,47 +286,35 @@ github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932 h1:mXoPYz/Ul5HYE github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= -github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= -github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= -github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI= github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= -github.com/boombuler/barcode v1.0.1 h1:NDBbPmhS+EqABEs5Kg3n/5ZNjy73Pz7SIV+KCeqyXcs= -github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= -github.com/bshuster-repo/logrus-logstash-hook v1.0.0/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= github.com/buger/jsonparser 
v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= -github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= github.com/bwesterb/go-ristretto v1.2.0/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= -github.com/bytecodealliance/wasmtime-go v0.36.0/go.mod h1:q320gUxqyI8yB+ZqRuaJOEnGkAnHh6WtJjMaT2CW4wI= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= github.com/cenkalti/backoff/v3 v3.2.2 h1:cfUAAO3yvKMYKPrvhDuHSwQnhZNk/RMHKdZqKTxfm6M= github.com/cenkalti/backoff/v3 v3.2.2/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= -github.com/cenkalti/backoff/v4 v4.1.2/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= -github.com/cenkalti/backoff/v4 v4.2.0 h1:HN5dHm3WBOgndBH6E8V0q2jIYIR3s9yglV8k/+MN3u4= -github.com/cenkalti/backoff/v4 v4.2.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/centrify/cloud-golang-sdk v0.0.0-20210923165758-a8c48d049166 h1:jQ93fKqb/wRmK/KiHpa7Tk9rmHeKXhp4j+5Sg/tENiY= github.com/centrify/cloud-golang-sdk v0.0.0-20210923165758-a8c48d049166/go.mod h1:c/gmvyN8lq6lYtHvrqqoXrg2xyN65N0mBmbikxFWXNE= -github.com/certifi/gocertifi v0.0.0-20191021191039-0944d244cd40/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= -github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= @@ -892,7 +331,6 @@ github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX github.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= github.com/cilium/ebpf v0.7.0/go.mod 
h1:/oI2+1shJiTGAMgl6/RgJr36Eo1jzrRcAWbcXO2usCA= -github.com/cilium/ebpf v0.9.1/go.mod h1:+OhNOIXx/Fnu1IE8bJz2dzOA+VSfyTfdNUVdlQnxUFY= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible h1:C29Ae4G5GtYyYMm1aztcyj/J5ckgJm2zwdDajFbx1NY= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3 h1:TJH+oke8D16535+jHExHj4nQvzlZrj7ug5D7I/orNUA= @@ -904,7 +342,6 @@ github.com/cloudflare/circl v1.1.0/go.mod h1:prBCrKB9DV4poKZY1l9zBXg2QJY7mvgRvtM github.com/cloudfoundry-community/go-cfclient v0.0.0-20210823134051-721f0e559306 h1:k8q2Nsz7kNaUlysVCnWIFLMUSqiKXaGLdIf9P0GsX2Y= github.com/cloudfoundry-community/go-cfclient v0.0.0-20210823134051-721f0e559306/go.mod h1:0FdHblxw7g3M2PPICOw9i8YZOHP9dZTHbJUtoxL7Z/E= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe h1:QQ3GSy+MqSHxm/d8nCtnAiZdYFd45cYZPs8vOOIYKfk= @@ -912,23 +349,17 @@ github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b h1:ACGZRIr7HsgBKHsueQ1yM4WaVaXh21ynwqsF8M8tXhA= github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20230310173818-32f1caf87195 h1:58f1tJ1ra+zFINPlwLWvQsR9CzAKt2e+EWV2yX9oXQ4= -github.com/cncf/xds/go v0.0.0-20230310173818-32f1caf87195/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c h1:2zRrJWIt/f9c9HhNHAgrRgq0San5gRRUJTBXLkchal0= github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c/go.mod h1:XGLbWH/ujMcbPbhZq52Nv6UrCghb1yGn//133kEsvDk= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= -github.com/cockroachdb/datadriven v0.0.0-20200714090401-bf6692d28da5/go.mod h1:h6jFvWxBdQXxjopDMZyH2UVceIRfR84bdzbkoKrsWNo= -github.com/cockroachdb/errors v1.2.4/go.mod h1:rQD95gz6FARkaKkQXUksEje/d9a6wBJoCr5oaCLELYA= -github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f/go.mod h1:i/u985jwjWRlyHXQbwatDASoW0RMlZ/3i9yJHE2xLkI= github.com/codegangsta/inject 
v0.0.0-20150114235600-33e0aa1cb7c0 h1:sDMmm+q/3+BukdIpxwO365v/Rbspp2Nt5XntgQRXq8Q= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= -github.com/container-orchestrated-devices/container-device-interface v0.5.4/go.mod h1:DjE95rfPiiSmG7uVXtg0z6MnPm/Lx4wxKCIts0ZE0vg= github.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE= github.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU= github.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= @@ -936,7 +367,6 @@ github.com/containerd/aufs v1.0.0/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj github.com/containerd/btrfs v0.0.0-20201111183144-404b9149801e/go.mod h1:jg2QkJcsabfHugurUvvPhS3E08Oxiuh5W/g1ybB4e0E= github.com/containerd/btrfs v0.0.0-20210316141732-918d888fb676/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= github.com/containerd/btrfs v1.0.0/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= -github.com/containerd/btrfs/v2 v2.0.0/go.mod h1:swkD/7j9HApWpzl8OHfrHNxppPd9l44DFZdF94BUj9k= github.com/containerd/cgroups v0.0.0-20190717030353-c4b9ac5c7601/go.mod h1:X9rLEHIqSf/wfK8NsPqxJmeZgW4pcfzdXITDrUSJ6uI= github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko= github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM= @@ -944,9 +374,8 @@ github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4S github.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= github.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU= +github.com/containerd/cgroups v1.0.3 h1:ADZftAkglvCiD44c77s5YmMqaP2pzVCFZvBmAlBdAP4= github.com/containerd/cgroups v1.0.3/go.mod h1:/ofk34relqNjSGyqPrmEULrO4Sc8LJhvJmWbUCUKqj8= -github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw= -github.com/containerd/cgroups/v3 v3.0.1/go.mod h1:/vtwk1VXrtoa5AaZLkypuOJgA/6DyPMZHJPGQNtlHnw= github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= github.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE= @@ -961,28 +390,24 @@ github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMX github.com/containerd/containerd v1.4.0-beta.2.0.20200729163537-40b22ef07410/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.4.9/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/containerd v1.5.0-beta.1/go.mod h1:5HfvG1V2FsKesEGQ17k5/T7V960Tmcumvqn8Mc+pCYQ= github.com/containerd/containerd v1.5.0-beta.3/go.mod h1:/wr9AVtEM7x9c+n0+stptlo/uBBoBORwEx6ardVcmKU= github.com/containerd/containerd v1.5.0-beta.4/go.mod h1:GmdgZd2zA2GYIBZ0w09ZvgqEq8EfBp/m3lcVZIvPHhI= 
github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoTJseu1FGOKuoA4nNb2s= github.com/containerd/containerd v1.5.1/go.mod h1:0DOxVqwDy2iZvrZp2JUx/E+hS0UNTVn7dJnIOwtYR4g= github.com/containerd/containerd v1.5.7/go.mod h1:gyvv6+ugqY25TiXxcZC3L5yOeYgEw0QMhscqVp1AR9c= -github.com/containerd/containerd v1.5.8/go.mod h1:YdFSv5bTFLpG2HIYmfqDpSYYTDX+mc5qtSuYx1YUb/s= -github.com/containerd/containerd v1.6.1/go.mod h1:1nJz5xCZPusx6jJU8Frfct988y0NpumIq9ODB0kLtoE= -github.com/containerd/containerd v1.6.6/go.mod h1:ZoP1geJldzCVY3Tonoz7b1IXk8rIX0Nltt5QE4OMNk0= -github.com/containerd/containerd v1.6.8/go.mod h1:By6p5KqPK0/7/CgO/A6t/Gz+CUYUu2zf1hUaaymVXB0= -github.com/containerd/containerd v1.6.9/go.mod h1:XVicUvkxOrftE2Q1YWUXgZwkkAxwQYNOFzYWvfVfEfQ= -github.com/containerd/containerd v1.7.0 h1:G/ZQr3gMZs6ZT0qPUZ15znx5QSdQdASW11nXTLTM2Pg= -github.com/containerd/containerd v1.7.0/go.mod h1:QfR7Efgb/6X2BDpTPJRvPTYDE9rsF0FsXX9J8sIs/sc= +github.com/containerd/containerd v1.5.17 h1:NLDEI//zhMZpR3DS/AP0qiN+dzYKNAwJaNXCnCmYcgY= +github.com/containerd/containerd v1.5.17/go.mod h1:7IN9MtIzTZH4WPEmD1gNH8bbTQXVX68yd3ZXxSHYCis= github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20190827140505-75bee3e2ccb6/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo= github.com/containerd/continuity v0.0.0-20201208142359-180525291bb7/go.mod h1:kR3BEg7bDFaEddKm54WSmrol1fKWDU1nKYkgrcgZT7Y= github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ= github.com/containerd/continuity v0.1.0/go.mod h1:ICJu0PwR54nI0yPEnJ6jcS+J7CZAUXrLh8lPo2knzsM= -github.com/containerd/continuity v0.2.2/go.mod h1:pWygW9u7LtS1o4N/Tn0FoCFDIXZ7rxcMX7HX1Dmibvk= +github.com/containerd/continuity v0.2.0/go.mod h1:wCYX+dRqZdImhGucXOqTQn05AhX6EUDaGEMUzTFFpLg= +github.com/containerd/continuity v0.2.1/go.mod h1:wCYX+dRqZdImhGucXOqTQn05AhX6EUDaGEMUzTFFpLg= github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= github.com/containerd/fifo v0.0.0-20180307165137-3d5202aec260/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= @@ -991,13 +416,8 @@ github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZ github.com/containerd/fifo v0.0.0-20201026212402-0724c46b320c/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= github.com/containerd/fifo v0.0.0-20210316144830-115abcc95a1d/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= github.com/containerd/fifo v1.0.0/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= -github.com/containerd/fifo v1.1.0/go.mod h1:bmC4NWMbXlt2EZ0Hc7Fx7QzTFxgPID13eH0Qu+MAb2o= github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU= github.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk= -github.com/containerd/go-cni v1.1.0/go.mod h1:Rflh2EJ/++BA2/vY5ao3K6WJRR/bZKsX123aPk+kUtA= -github.com/containerd/go-cni v1.1.3/go.mod 
h1:Rflh2EJ/++BA2/vY5ao3K6WJRR/bZKsX123aPk+kUtA= -github.com/containerd/go-cni v1.1.6/go.mod h1:BWtoWl5ghVymxu6MBjg79W9NZrCRyHIdUtk4cauMe34= -github.com/containerd/go-cni v1.1.9/go.mod h1:XYrZJ1d5W6E2VOvjffL3IZq0Dz6bsVlERHbekNK90PM= github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= github.com/containerd/go-runc v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g= @@ -1007,28 +427,19 @@ github.com/containerd/imgcrypt v1.0.1/go.mod h1:mdd8cEPW7TPgNG4FpuP3sGBiQ7Yi/zak github.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6TNsg0ctmizkrOgXRNQjAPFWpMYRWuiB6dSF4Pfa5SA= github.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow= github.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms= -github.com/containerd/imgcrypt v1.1.3/go.mod h1:/TPA1GIDXMzbj01yd8pIbQiLdQxed5ue1wb8bP7PQu4= -github.com/containerd/imgcrypt v1.1.4/go.mod h1:LorQnPtzL/T0IyCeftcsMEO7AqxUDbdO8j/tSUpgxvo= -github.com/containerd/imgcrypt v1.1.7/go.mod h1:FD8gqIcX5aTotCtOmjeCsi3A1dHmTZpnMISGKSczt4k= github.com/containerd/nri v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c= github.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= github.com/containerd/nri v0.1.0/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= -github.com/containerd/nri v0.3.0/go.mod h1:Zw9q2lP16sdg0zYybemZ9yTDy8g7fPCIB3KXOGlggXI= github.com/containerd/stargz-snapshotter/estargz v0.4.1/go.mod h1:x7Q9dg9QYb4+ELgxmo4gBUeJB0tl5dqH1Sdz0nJU1QM= -github.com/containerd/stargz-snapshotter/estargz v0.12.1/go.mod h1:12VUuCq3qPq4y8yUW+l5w3+oXV3cx2Po3KSe/SmPGqw= github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= github.com/containerd/ttrpc v0.0.0-20190828172938-92c8520ef9f8/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= github.com/containerd/ttrpc v0.0.0-20191028202541-4f1b8fe65a5c/go.mod h1:LPm1u0xBw8r8NOKoOdNMeVHSawSsltak+Ihv+etqsE8= github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= github.com/containerd/ttrpc v1.0.2/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= -github.com/containerd/ttrpc v1.1.0/go.mod h1:XX4ZTnoOId4HklF4edwc4DcqskFZuvXB1Evzy5KFQpQ= -github.com/containerd/ttrpc v1.1.1-0.20220420014843-944ef4a40df3/go.mod h1:YYyNVhZrTMiaf51Vj6WhAJqJw+vl/nzABhj8pWrzle4= -github.com/containerd/ttrpc v1.2.1/go.mod h1:sIT6l32Ph/H9cvnJsfXM5drIVzTr5A2flTf1G5tYZak= github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc= github.com/containerd/typeurl v0.0.0-20190911142611-5eb25027c9fd/go.mod h1:GeKYzf2pQcqv7tJ0AoCuuhtnqhva5LNU3U+OyKxxJpk= github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg= github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= -github.com/containerd/typeurl/v2 v2.1.0/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0= github.com/containerd/zfs v0.0.0-20200918131355-0a33824f23a2/go.mod h1:8IgZOBdv8fAgXddBT4dBXJPtxyRsejFIpXoklgxgEjw= github.com/containerd/zfs v0.0.0-20210301145711-11e8f1707f62/go.mod h1:A9zfAbMlQwE+/is6hi0Xw8ktpL+6glmqZYtevJgaB8Y= 
github.com/containerd/zfs v0.0.0-20210315114300-dde8f0fda960/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= @@ -1037,27 +448,16 @@ github.com/containerd/zfs v1.0.0/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNR github.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= github.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= -github.com/containernetworking/cni v1.0.1/go.mod h1:AKuhXbN5EzmD4yTNtfSsX3tPcmtrBI6QcRV0NiNt15Y= -github.com/containernetworking/cni v1.1.1/go.mod h1:sDpYKmGVENF3s6uvMvGgldDWeG8dMxakj/u+i9ht9vw= -github.com/containernetworking/cni v1.1.2/go.mod h1:sDpYKmGVENF3s6uvMvGgldDWeG8dMxakj/u+i9ht9vw= github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM= github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8= -github.com/containernetworking/plugins v1.0.1/go.mod h1:QHCfGpaTwYTbbH+nZXKVTxNBDZcxSOplJT5ico8/FLE= -github.com/containernetworking/plugins v1.1.1/go.mod h1:Sr5TH/eBsGLXK/h71HeLfX19sZPp3ry5uHSkI4LPxV8= -github.com/containernetworking/plugins v1.2.0/go.mod h1:/VjX4uHecW5vVimFa1wkG4s+r/s9qIfPdqlLF4TW8c4= github.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc= github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4= github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= -github.com/containers/ocicrypt v1.1.2/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= -github.com/containers/ocicrypt v1.1.3/go.mod h1:xpdkbVAuaH3WzbEabUd5yDsl9SwJA5pABH85425Es2g= -github.com/containers/ocicrypt v1.1.6/go.mod h1:WgjxPWdTJMqYMjf3M6cuIFFA1/MpyyhIM99YInA+Rvc= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= github.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= -github.com/coreos/go-iptables v0.6.0/go.mod h1:Qe8Bv2Xik5FyTXwgIbLAnv2sWSBmvWdFETJConOQ//Q= github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk= github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= @@ -1074,54 +474,38 @@ github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf h1:iW4rZ826su+pq github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= +github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= 
-github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/couchbase/gocb/v2 v2.6.3 h1:5RsMo+RRfK0mVxHLAfpBz3/tHlgXZb1WBNItLk9Ab+c= -github.com/couchbase/gocb/v2 v2.6.3/go.mod h1:yF5F6BHTZ/ZowhEuZbySbXrlI4rHd1TIhm5azOaMbJU= -github.com/couchbase/gocbcore/v10 v10.2.3 h1:PEkRSNSkKjUBXx82Ucr094+anoiCG5GleOOQZOHo6D4= -github.com/couchbase/gocbcore/v10 v10.2.3/go.mod h1:lYQIIk+tzoMcwtwU5GzPbDdqEkwkH3isI2rkSpfL0oM= -github.com/couchbaselabs/gocaves/client v0.0.0-20230307083111-cc3960c624b1/go.mod h1:AVekAZwIY2stsJOMWLAS/0uA/+qdp7pjO8EHnl61QkY= -github.com/couchbaselabs/gocaves/client v0.0.0-20230404095311-05e3ba4f0259 h1:2TXy68EGEzIMHOx9UvczR5ApVecwCfQZ0LjkmwMI6g4= -github.com/couchbaselabs/gocaves/client v0.0.0-20230404095311-05e3ba4f0259/go.mod h1:AVekAZwIY2stsJOMWLAS/0uA/+qdp7pjO8EHnl61QkY= +github.com/couchbase/gocb/v2 v2.3.3 h1:OItaIrFqXR1ba9J77E2YOU+CSF9G9FHYivV26Xgoi98= +github.com/couchbase/gocb/v2 v2.3.3/go.mod h1:h4b3UYDnGI89hMW9VypVjAr+EE0Ki4jjlXJrVdmSZhQ= +github.com/couchbase/gocbcore/v10 v10.0.4 h1:RJ+dSXxMUbrpfgYEEUhMYwPH1S5KvcQYve3D2aKHP28= +github.com/couchbase/gocbcore/v10 v10.0.4/go.mod h1:s6dwBFs4c3+cAzZbo1q0VW+QasudhHJuehE8b8U2YNg= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= -github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ= github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= -github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= -github.com/danieljoos/wincred v1.1.2 h1:QLdCxFs1/Yl4zduvBdcHB8goaYk9RARS2SgLLRuAyr0= -github.com/danieljoos/wincred v1.1.2/go.mod h1:GijpziifJoIBfYh+S7BbkdUTU4LfM+QnGqR5Vl2tAx0= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew 
v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d/go.mod h1:tmAIfUFEirG/Y8jhZ9M+h36obRZAk/1fcSpXwAVlfqE= github.com/denisenkom/go-mssqldb v0.12.2 h1:1OcPn5GBIobjWNd+8yjfHNIaFX14B1pWI3F9HZy5KXw= github.com/denisenkom/go-mssqldb v0.12.2/go.mod h1:lnIw1mZukFRZDJYQ0Pb833QS2IaC3l5HkEfra2LJ+sk= github.com/denverdino/aliyungo v0.0.0-20170926055100-d3308649c661/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba h1:p6poVbjHDkKa+wtC8frBMwQtT3BmqGYBjzMwJ63tuR4= github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= -github.com/dgraph-io/badger/v3 v3.2103.2/go.mod h1:RHo4/GmYcKKh5Lxu63wLEMHJ70Pac2JqZRYGhlyAo2M= -github.com/dgraph-io/ristretto v0.1.0/go.mod h1:fux0lOrBhrVCJd3lcTHsIJhq1T2rokOu6v9Vcb3Q9ug= github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8= github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= @@ -1130,31 +514,29 @@ github.com/digitalocean/godo v1.7.5/go.mod h1:h6faOIcZ8lWIwNQ+DN7b3CgX4Kwby5T+nb github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= -github.com/distribution/distribution/v3 v3.0.0-20220526142353-ffbd94cbe269/go.mod h1:28YO/VJk9/64+sTGNuYaBjWxrXTPrj0C0XmgTIOjxX4= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/dnephin/pflag v1.0.7 h1:oxONGlWxhmUct0YzKTgrpQv9AUA1wtPBn7zuSjJqptk= github.com/dnephin/pflag v1.0.7/go.mod h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/cli v20.10.17+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/cli v20.10.20+incompatible h1:lWQbHSHUFs7KraSN2jOJK7zbMS2jNCHI4mt4xUFUVQ4= -github.com/docker/cli v20.10.20+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v20.10.8+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v20.10.9+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v20.10.18+incompatible 
h1:f/GQLsVpo10VvToRay2IraVA1wHz9KktZyjev3SIVDU= +github.com/docker/cli v20.10.18+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68= github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= -github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v1.4.2-0.20200319182547-c7ad2b866182/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v20.10.17+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v20.10.20+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v23.0.4+incompatible h1:Kd3Bh9V/rO+XpTP/BLqM+gx8z7+Yb0AA2Ibj+nNo4ek= -github.com/docker/docker v23.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v20.10.9+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v20.10.10+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v20.10.18+incompatible h1:SN84VYXTBNGn92T/QwIRPlum9zfemfitN7pbsp26WSc= +github.com/docker/docker v20.10.18+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= -github.com/docker/docker-credential-helpers v0.6.4/go.mod h1:ofX3UI0Gz1TteYBjtgs07O36Pyasyp66D2uKT7H8W1c= -github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-events v0.0.0-20170721190031-9461782956ad/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= @@ -1176,80 +558,58 @@ github.com/duosecurity/duo_api_golang v0.0.0-20190308151101-6c680f768e74/go.mod github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM= -github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful 
v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emicklei/go-restful/v3 v3.8.0 h1:eCZ8ulSerjdAiaNpF7GxXIE7ZCMo1moN1qX+S609eVw= github.com/emicklei/go-restful/v3 v3.8.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= -github.com/emicklei/go-restful/v3 v3.9.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= -github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ= -github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= -github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= +github.com/envoyproxy/go-control-plane v0.10.3 h1:xdCVXxEe0Y3FQith+0cj2irwZudqGYvecuLB1HtdexY= github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34= -github.com/envoyproxy/go-control-plane v0.11.0 h1:jtLewhRR2vMRNnq2ZZUoCjUlgut+Y0+sDDWPOfwOi1o= -github.com/envoyproxy/go-control-plane v0.11.0/go.mod h1:VnHyVMpzcLvCFt9yUz1UnCwHLhwx1WguiVDV7pTG/tI= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo= +github.com/envoyproxy/protoc-gen-validate v0.9.1 h1:PS7VIOgmSVhWUEeZwTe7z7zouA22Cr590PzXKbZHOVY= github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w= -github.com/envoyproxy/protoc-gen-validate v0.10.0 h1:oIfnZFdC0YhpNNEX+SuIqko4cqqVZeN9IGTrhZje83Y= -github.com/envoyproxy/protoc-gen-validate v0.10.0/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/evanphx/json-patch v4.11.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch/v5 v5.5.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color 
v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w= +github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/favadi/protoc-go-inject-tag v1.4.0 h1:K3KXxbgRw5WT4f43LbglARGz/8jVsDOS7uMjG4oNvXY= -github.com/favadi/protoc-go-inject-tag v1.4.0/go.mod h1:AZ+PK+QDKUOLlBRG0rYiKkUX5Hw7+7GTFzlU99GFSbQ= -github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= -github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/favadi/protoc-go-inject-tag v1.3.0 h1:JPrmsmc/uBShG85uY5xGZIa5WJ0IaNZn6LZhQR9tIQE= +github.com/favadi/protoc-go-inject-tag v1.3.0/go.mod h1:SSkUBgfqw2IJ2p7NPNKWk0Idwxt/qIt2LQgFPUgRGtc= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/foxcpp/go-mockdns v0.0.0-20210729171921-fb145fc6f897/go.mod h1:lgRN6+KxQBawyIghpnl5CezHFGS9VLzvtVlwxvzXTQ4= +github.com/frankban/quicktest v1.10.0/go.mod h1:ui7WezCLWMWxVWr1GETZY3smRy0G4KWq9vcPtJmFl7Y= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/frankban/quicktest v1.13.0/go.mod h1:qLE0fzW0VuyUAJgPU19zByoIr0HtCHN/r/VLSOOIySU= -github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= -github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= +github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= -github.com/fxamacker/cbor/v2 v2.4.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= -github.com/gabriel-vasile/mimetype v1.4.1/go.mod h1:05Vi0w3Y9c/lNvJOdmIwvrrAhX3rYhfQQCaf9VJcv7M= 
-github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= -github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= -github.com/gammazero/deque v0.2.1 h1:qSdsbG6pgp6nL7A0+K/B7s12mcCY/5l5SIUpMOl+dC0= -github.com/gammazero/deque v0.2.1/go.mod h1:LFroj8x4cMYCukHJDbxFCkT+r9AndaJnFMuZDV34tuU= -github.com/gammazero/workerpool v1.1.3 h1:WixN4xzukFoN0XSeXF6puqEqFTl2mECI9S6W44HWy9Q= -github.com/gammazero/workerpool v1.1.3/go.mod h1:wPjyBLDbyKnUn2XwwyD3EEwo9dHutia9/fwNmSHWACc= +github.com/gabriel-vasile/mimetype v1.3.1 h1:qevA6c2MtE1RorlScnixeG0VA1H4xrXyhyX3oWBynNQ= +github.com/gabriel-vasile/mimetype v1.3.1/go.mod h1:fA8fi6KUiG7MgQQ+mEWotXoEOvmxRtOJlERCzSmRvr8= +github.com/gammazero/deque v0.0.0-20190130191400-2afb3858e9c7 h1:D2LrfOPgGHQprIxmsTpxtzhpmF66HoM6rXSmcqaX7h8= +github.com/gammazero/deque v0.0.0-20190130191400-2afb3858e9c7/go.mod h1:GeIq9qoE43YdGnDXURnmKTnGg15pQz4mYkXSTChbneI= +github.com/gammazero/workerpool v0.0.0-20190406235159-88d534f22b56 h1:VzbudKn/nvxYKOdzgkEBS6SSreRjAgoJ+ZeS4wPFkgc= +github.com/gammazero/workerpool v0.0.0-20190406235159-88d534f22b56/go.mod h1:w9RqFVO2BM3xwWEcAB8Fwp0OviTBBEiRmSBDfbXnd3w= github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= -github.com/getkin/kin-openapi v0.76.0/go.mod h1:660oXbgy5JFMKreazJaQTw7o+X00qeSyhcnluiMv+Xg= -github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 h1:Mn26/9ZMNWSw9C9ERFA1PUxfmGpolnw2v0bKOREu5ew= @@ -1262,51 +622,36 @@ github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0 github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/go-asn1-ber/asn1-ber v1.3.1/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= github.com/go-asn1-ber/asn1-ber v1.4.1/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= +github.com/go-asn1-ber/asn1-ber v1.5.1 h1:pDbRAunXzIUXfx4CB2QJFv5IuPiuoW+sWvr/Us009o8= github.com/go-asn1-ber/asn1-ber v1.5.1/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= -github.com/go-asn1-ber/asn1-ber v1.5.4 h1:vXT6d/FNDiELJnLb6hGNa309LMsrCoYFvpwHDF0+Y1A= -github.com/go-asn1-ber/asn1-ber v1.5.4/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= -github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= -github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= -github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= -github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= -github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= -github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= -github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY= +github.com/go-errors/errors v1.4.1 h1:IvVlgbzSsaUNudsw5dcXSzF3EWyXTi5XrAdngnuhRyg= +github.com/go-errors/errors v1.4.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= -github.com/go-ini/ini v1.66.6/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= -github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= -github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk= github.com/go-ldap/ldap/v3 v3.1.7/go.mod h1:5Zun81jBTabRaI8lzN7E1JjyEl1g6zI6u9pd8luAK4Q= +github.com/go-ldap/ldap/v3 v3.1.10/go.mod h1:5Zun81jBTabRaI8lzN7E1JjyEl1g6zI6u9pd8luAK4Q= +github.com/go-ldap/ldap/v3 v3.4.1 h1:fU/0xli6HY02ocbMuozHAYsaHLcnkLjvho2r5a34BUU= github.com/go-ldap/ldap/v3 v3.4.1/go.mod h1:iYS1MdmrmceOJ1QOTnRXrIs7i3kloqtmGQjRvjKpyMg= -github.com/go-ldap/ldap/v3 v3.4.4 h1:qPjipEpt+qDa6SI/h1fzuGWoRUY+qqQ9sOZq67/PYUs= -github.com/go-ldap/ldap/v3 v3.4.4/go.mod h1:fe1MsuN5eJJ1FeLT/LEBVdWfNWKh459R7aXgXtJC+aI= github.com/go-ldap/ldif v0.0.0-20200320164324-fd88d9b715b3 h1:sfz1YppV05y4sYaW7kXZtrocU/+vimnIWt4cxAYh7+o= github.com/go-ldap/ldif v0.0.0-20200320164324-fd88d9b715b3/go.mod h1:ZXFhGda43Z2TVbfGZefXyMJzsDHhCh0go3bZUcwTx7o= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= -github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-logr/zapr v1.2.3/go.mod h1:eIauM6P8qSvTw5o2ez6UEAfGjQKrxQTl5EoK+Qa2oG4= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab h1:xveKWz2iaueeTaUgdetzel+U7exyigDYBryyVfV/rZk= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= github.com/go-ole/go-ole v1.2.6 
h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= @@ -1336,18 +681,15 @@ github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwds github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= -github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= +github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM= github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg= -github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= -github.com/go-openapi/jsonreference v0.20.1 h1:FBLnyygC4/IZZr893oiomc9XaghoveYTrLC1F86HID8= -github.com/go-openapi/jsonreference v0.20.1/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= github.com/go-openapi/loads v0.19.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= @@ -1397,9 +739,8 @@ github.com/go-openapi/swag v0.19.7/go.mod h1:ao+8BpOPyKdpQz3AOJfbeEVpLmWAvlT1IfT github.com/go-openapi/swag v0.19.9/go.mod h1:ao+8BpOPyKdpQz3AOJfbeEVpLmWAvlT1IfTe5McPyhY= github.com/go-openapi/swag v0.19.12/go.mod h1:eFdyEBkTdoAf/9RXBvj4cr1nH7GD8Kzo5HTt47gr72M= github.com/go-openapi/swag v0.19.13/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng= github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= -github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4= github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA= github.com/go-openapi/validate v0.19.3/go.mod h1:90Vh6jjkTn+OT1Eefm0ZixWNFjhtOH7vS9k0lo6zwJo= @@ -1411,8 +752,6 @@ github.com/go-openapi/validate v0.20.2 h1:AhqDegYV3J3iQkMPJSXkvzymHKMTw0BST3RK3h github.com/go-openapi/validate v0.20.2/go.mod h1:e7OJoKNgd0twXZwIn0A43tHbvIcr/rZIVCbJBpTUoY0= github.com/go-ozzo/ozzo-validation v3.6.0+incompatible h1:msy24VGS42fKO9K1vLz82/GeYW1cILu7Nuuj1N3BBkE= github.com/go-ozzo/ozzo-validation v3.6.0+incompatible/go.mod h1:gsEKFIVnabGBt6mXmxK0MoFy+cZoTJY6mu5Ll3LVLBU= -github.com/go-pdf/fpdf v0.5.0/go.mod 
h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= -github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= @@ -1423,11 +762,13 @@ github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GO github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-zookeeper/zk v1.0.3 h1:7M2kwOsc//9VeeFiPtf+uSJlVpU66x9Ba5+8XK7/TDg= @@ -1456,26 +797,20 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= -github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= -github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gocql/gocql v1.0.0 h1:UnbTERpP72VZ/viKE1Q1gPtmLvyTZTvuAstvSRydw/c= github.com/gocql/gocql v1.0.0/go.mod h1:3gM2c4D3AnkISwBxGnMMsS8Oy4y2lhbPRsH4xnJrHG8= github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= -github.com/godbus/dbus 
v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= -github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.6/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gofrs/uuid v4.3.0+incompatible h1:CaSVZxm5B+7o45rtab4jC2G37WGYX1zQfuU2i6DSvnc= github.com/gofrs/uuid v4.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= @@ -1491,13 +826,12 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69 github.com/goji/httpauth v0.0.0-20160601135302-2da839ab0f4d/go.mod h1:nnjvkQ9ptGaCkuDUx6wNykzzlUixGxvkme+H/lnzb+A= github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs= +github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= -github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -1514,7 +848,6 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -1533,10 +866,8 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf 
v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= @@ -1544,17 +875,11 @@ github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 h1:DIPQnGy2Gv2FSA4B/hh8Q7xx3B7AIDk3DAMeHclH1vQ= github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6/go.mod h1:0AKcRCkMoKvUvlf89F6O7H2LYdhr1zBh736mBItOdRs= -github.com/gomodule/redigo v1.8.2/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= -github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= -github.com/google/cel-go v0.12.6/go.mod h1:Jk7ljRzLBhkmiAwBoUxB1sZSCVBAzkqPF25olK/iRDw= -github.com/google/flatbuffers v1.12.1/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/flatbuffers v2.0.0+incompatible h1:dicJ2oXwypfwUGnB2/TYWYEKiuk9eYQlQO/AnOHl5mI= github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/flatbuffers v23.1.21+incompatible h1:bUqzx/MXCDxuS0hRJL2EfjyZL3uQrPbMocUa8zGqsTA= -github.com/google/flatbuffers v23.1.21+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54= github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -1574,7 +899,6 @@ github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.5.1/go.mod h1:Ct15B4yir3PLOP5jsy0GNeYVaIZs/MK/Jz5any1wFW0= -github.com/google/go-containerregistry v0.13.0/go.mod h1:J9FQ+eSS4a1aC2GNZxvNpbWhgp0487v+cgiilB4FqDo= github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-metrics-stackdriver v0.2.0 h1:rbs2sxHAPn2OtUj9JdR/Gij1YKGl0BTVD0augB+HEjE= @@ -1583,16 +907,12 @@ github.com/google/go-querystring v0.0.0-20170111101155-53e6ce116135/go.mod h1:od github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod 
h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= -github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= -github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -1600,64 +920,34 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= -github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= -github.com/google/tink/go 
v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w= -github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM= +github.com/google/tink/go v1.6.1 h1:t7JHqO8Ath2w2ig5vjwQYJzhGEZymedQc90lQXUBa4I= +github.com/google/tink/go v1.6.1/go.mod h1:IGW53kTgag+st5yPhKKwJ6u2l+SSp5/v9XF7spovjlY= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= -github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= -github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= -github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/enterprise-certificate-proxy v0.2.3 h1:yk9/cqRKtT9wXZSsRH9aurXEpJX+U6FLtpYTdC3R06k= github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= -github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= -github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= -github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= -github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo= -github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY= -github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= -github.com/googleapis/gax-go/v2 v2.9.1 h1:DpTpJqzZ3NvX9zqjhIuI1oVzYZMvboZe+3LoeEIJjHM= -github.com/googleapis/gax-go/v2 v2.9.1/go.mod h1:4FG3gMrVZlyMp5itSYKMU9z/lBE7+SbnUOvzH2HqbEY= +github.com/googleapis/gax-go/v2 v2.7.1 h1:gF4c0zjUP2H/s/hEGyLA3I0fA2ZWjzYiONAD6cvPr8A= +github.com/googleapis/gax-go/v2 v2.7.1/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/googleapis/gnostic v0.2.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= -github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= -github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= -github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= 
-github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gophercloud/gophercloud v0.1.0 h1:P/nh25+rzXouhytV2pUHBb65fnds26Ghl8/391+sT5o= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= -github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= @@ -1672,26 +962,22 @@ github.com/gotestyourself/gotestyourself v2.2.0+incompatible h1:AQwinXlbQR2HvPjQ github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= -github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= -github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= -github.com/hashicorp/cap v0.3.1 h1:JwX2vg3KIl2+ka4VIPB0yWB9PoPvHL3ACmVrLJLCHDQ= -github.com/hashicorp/cap v0.3.1/go.mod h1:dHTmyMIVbzT981XxRoci5G//dfWmd/HhuNiCH6J5+IA= -github.com/hashicorp/consul-template v0.32.0 h1:VIfKjoJLkBYLgHdLH4mR7RstPc549qqHJiecqPwYTis= -github.com/hashicorp/consul-template v0.32.0/go.mod h1:r9mcCoHVkTeVln7aL4Ky+RfKupOtbEW70i8n9YuEe+w= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/api v1.20.0 h1:9IHTjNVSZ7MIwjlW3N3a7iGiykCMDpxZu8jsxFJh0yc= -github.com/hashicorp/consul/api 
v1.20.0/go.mod h1:nR64eD44KQ59Of/ECwt2vUmIK2DKsDzAwTmwmLl8Wpo= +github.com/hashicorp/cap v0.2.1-0.20230221194157-7894fed1633d h1:29noMC2UssBX3F/BUmk0/j4PRUU4QvPTfyeOn3tmcOA= +github.com/hashicorp/cap v0.2.1-0.20230221194157-7894fed1633d/go.mod h1:dHTmyMIVbzT981XxRoci5G//dfWmd/HhuNiCH6J5+IA= +github.com/hashicorp/consul-template v0.29.5 h1:tzEo93RqODAX2cgOe/ke8xcpdPdxg5rxl6d22wE3f6c= +github.com/hashicorp/consul-template v0.29.5/go.mod h1:SZGBPz/t0JaBwMOqM6q/mG66cBRA8IeDUjOwjO0Pa5M= +github.com/hashicorp/consul/api v1.17.0 h1:aqytbw31uCPNn37ST+717IyGod+P1eTgSGu3yjRo4bs= +github.com/hashicorp/consul/api v1.17.0/go.mod h1:ZNwemOPAdgtV4cCx9fqxNmw+PI3vliW6gYin2WD+F2g= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/consul/sdk v0.13.1 h1:EygWVWWMczTzXGpO93awkHFzfUka6hLYJ0qhETd+6lY= +github.com/hashicorp/consul/sdk v0.13.0 h1:lce3nFlpv8humJL8rNrrGHYSKc3q+Kxfeg3Ii1m6ZWU= +github.com/hashicorp/consul/sdk v0.13.0/go.mod h1:0hs/l5fOVhJy/VdcoaNqUSi2AUs95eF5WKtv+EYIQqE= github.com/hashicorp/cronexpr v1.1.1 h1:NJZDd87hGXjoZBdvyCF9mX4DCq5Wy7+A/w+A7q0wn6c= github.com/hashicorp/cronexpr v1.1.1/go.mod h1:P4wA0KBl9C5q2hABiMO7cp6jcIg96CDh1Efb3g1PWA4= github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -1711,10 +997,11 @@ github.com/hashicorp/go-gcp-common v0.8.0 h1:/2vGAbCU1v+BZ3YHXTCzTvxqma9WOJHYtAD github.com/hashicorp/go-gcp-common v0.8.0/go.mod h1:Q7zYRy9ue9SuaEN2s9YLIQs4SoKHdoRmKRcImY3SLgs= github.com/hashicorp/go-hclog v0.9.1/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v0.16.2/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.4.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-hclog v1.3.1/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= @@ -1723,9 +1010,8 @@ github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJ github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-kms-wrapping/entropy/v2 v2.0.0 h1:pSjQfW3vPtrOTcasTUKgCTQT7OGPPTTMVRrOfU6FJD8= github.com/hashicorp/go-kms-wrapping/entropy/v2 v2.0.0/go.mod h1:xvb32K2keAc+R8DSFG2IwDcydK9DBQE+fGA5fsw6hSk= +github.com/hashicorp/go-kms-wrapping/v2 v2.0.8 h1:9Q2lu1YbbmiAgvYZ7Pr31RdlVonUpX+mmDL7Z7qTA2U= github.com/hashicorp/go-kms-wrapping/v2 v2.0.8/go.mod h1:qTCjxGig/kjuj3hk1z8pOUrzbse/GxB1tGfbrq8tGJg= -github.com/hashicorp/go-kms-wrapping/v2 v2.0.9 h1:JpCvi97NMA+saNqO8ovQcGoRbBq6P5ZZlJqvOsW5ick= -github.com/hashicorp/go-kms-wrapping/v2 v2.0.9/go.mod h1:NtMaPhqSlfQ72XWDD2g80o8HI8RKkowIB8/WZHMyPY4= github.com/hashicorp/go-kms-wrapping/wrappers/aead/v2 v2.0.7-1 h1:ZV26VJYcITBom0QqYSUOIj4HOHCVPEFjLqjxyXV/AbA= github.com/hashicorp/go-kms-wrapping/wrappers/aead/v2 v2.0.7-1/go.mod 
h1:b99cDSA+OzcyRoBZroSf174/ss/e6gUuS45wue9ZQfc= github.com/hashicorp/go-kms-wrapping/wrappers/alicloudkms/v2 v2.0.1 h1:ydUCtmr8f9F+mHZ1iCsvzqFTXqNVpewX3s9zcYipMKI= @@ -1753,22 +1039,20 @@ github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHh github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.4.3/go.mod h1:5fGEH17QVwTTcR0zV7yhDPLLmFX9YSZ38b18Udy6vYQ= +github.com/hashicorp/go-plugin v1.4.5/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= +github.com/hashicorp/go-plugin v1.4.8 h1:CHGwpxYDOttQOY7HOWgETU9dyVjOXzniXDqJcYJE1zM= github.com/hashicorp/go-plugin v1.4.8/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= -github.com/hashicorp/go-plugin v1.4.9 h1:ESiK220/qE0aGxWdzKIvRH69iLiuN/PjoLTm69RoWtU= -github.com/hashicorp/go-plugin v1.4.9/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= github.com/hashicorp/go-raftchunking v0.6.3-0.20191002164813-7e9e8525653a h1:FmnBDwGwlTgugDGbVxwV8UavqSMACbGrUpfc98yFLR4= github.com/hashicorp/go-raftchunking v0.6.3-0.20191002164813-7e9e8525653a/go.mod h1:xbXnmKqX9/+RhPkJ4zrEx4738HacP72aaUPlT2RZ4sU= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-retryablehttp v0.6.6/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-retryablehttp v0.7.1 h1:sUiuQAnLlbvmExtFQs72iFW/HXeUn8Z1aJLQ4LJJbTQ= github.com/hashicorp/go-retryablehttp v0.7.1/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-retryablehttp v0.7.2 h1:AcYqCvkpalPnPF2pn0KamgwamS42TqUDDYFRKq/RAd0= -github.com/hashicorp/go-retryablehttp v0.7.2/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-secure-stdlib/awsutil v0.2.2 h1:kWg2vyKl7BRXrNxYziqDJ55n+vtOQ1QsGORjzoeB+uM= -github.com/hashicorp/go-secure-stdlib/awsutil v0.2.2/go.mod h1:oKHSQs4ivIfZ3fbXGQOop1XuDfdSb8RIsWTGaAanSfg= +github.com/hashicorp/go-secure-stdlib/awsutil v0.1.6 h1:W9WN8p6moV1fjKLkeqEgkAMu5rauy9QeYDAmIaPuuiA= +github.com/hashicorp/go-secure-stdlib/awsutil v0.1.6/go.mod h1:MpCPSPGLDILGb4JMm94/mMi3YysIqsXzGCzkEZjcjXg= github.com/hashicorp/go-secure-stdlib/base62 v0.1.1/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= github.com/hashicorp/go-secure-stdlib/base62 v0.1.2 h1:ET4pqyjiGmY09R5y+rSd70J2w45CtbWDNvGqWp/R3Ng= github.com/hashicorp/go-secure-stdlib/base62 v0.1.2/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= @@ -1779,11 +1063,8 @@ github.com/hashicorp/go-secure-stdlib/gatedwriter v0.1.1/go.mod h1:6RoRTSMDK2H/r github.com/hashicorp/go-secure-stdlib/kv-builder v0.1.2 h1:NS6BHieb/pDfx3M9jDdaPpGyyVp+aD4A3DjX3dgRmzs= github.com/hashicorp/go-secure-stdlib/kv-builder v0.1.2/go.mod h1:rf5JPE13wi+NwjgsmGkbg4b2CgHq8v7Htn/F0nDe/hg= github.com/hashicorp/go-secure-stdlib/mlock v0.1.1/go.mod h1:zq93CJChV6L9QTfGKtfBxKqD7BqqXx5O04A/ns2p5+I= +github.com/hashicorp/go-secure-stdlib/mlock v0.1.2 
h1:p4AKXPPS24tO8Wc8i1gLvSKdmkiSY5xuju57czJ/IJQ= github.com/hashicorp/go-secure-stdlib/mlock v0.1.2/go.mod h1:zq93CJChV6L9QTfGKtfBxKqD7BqqXx5O04A/ns2p5+I= -github.com/hashicorp/go-secure-stdlib/mlock v0.1.3 h1:kH3Rhiht36xhAfhuHyWJDgdXXEx9IIZhDGRk24CDhzg= -github.com/hashicorp/go-secure-stdlib/mlock v0.1.3/go.mod h1:ov1Q0oEDjC3+A4BwsG2YdKltrmEw8sf9Pau4V9JQ4Vo= -github.com/hashicorp/go-secure-stdlib/nonceutil v0.1.0 h1:iJG9Q3iUme12yH+wzBMGYrw/Am4CfX3sDcA8m5OGfhQ= -github.com/hashicorp/go-secure-stdlib/nonceutil v0.1.0/go.mod h1:s28ohJ0kU6tersf0it/WsBCyZSdziPlP+G1FRA3ar28= github.com/hashicorp/go-secure-stdlib/parseutil v0.1.1/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 h1:UpiO20jno/eV1eVZcxqWnUohyKRe1g8FPV/xH1s/2qs= @@ -1797,24 +1078,25 @@ github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9 github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.2 h1:phcbL8urUzF/kxA/Oj6awENaRwfWsjP59GW7u2qlDyY= github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.2/go.mod h1:l8slYwnJA26yBz+ErHpp2IRCLr0vuOMGBORIz4rRiAs= -github.com/hashicorp/go-slug v0.11.1 h1:c6lLdQnlhUWbS5I7hw8SvfymoFuy6EmiFDedy6ir994= -github.com/hashicorp/go-slug v0.11.1/go.mod h1:Ib+IWBYfEfJGI1ZyXMGNbu2BU+aa3Dzu41RKLH301v4= +github.com/hashicorp/go-slug v0.7.0 h1:8HIi6oreWPtnhpYd8lIGQBgp4rXzDWQTOhfILZm+nok= +github.com/hashicorp/go-slug v0.7.0/go.mod h1:Ib+IWBYfEfJGI1ZyXMGNbu2BU+aa3Dzu41RKLH301v4= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-syslog v1.0.0 h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-tfe v1.25.1 h1:OxjDhY8Rj36n/uTSmhdFRLcnhXFfRTsopiovYSkJjak= -github.com/hashicorp/go-tfe v1.25.1/go.mod h1:1Y6nsdMuJ14lYdc1VMLl/erlthvMzUsJn+WYWaAdSc4= +github.com/hashicorp/go-tfe v0.20.0 h1:XUAhKoCX8ZUQfwBebC8hz7nkSSnqgNkaablIfxnZ0PQ= +github.com/hashicorp/go-tfe v0.20.0/go.mod h1:gyXLXbpBVxA2F/6opah8XBsOkZJxHYQmghl0OWi8keI= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.3.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod 
h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -1822,8 +1104,6 @@ github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uG github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM= github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM= -github.com/hashicorp/hcl/v2 v2.16.2 h1:mpkHZh/Tv+xet3sy3F9Ld4FyI2tUpWe9x3XtPx9f1a0= -github.com/hashicorp/hcl/v2 v2.16.2/go.mod h1:JRmR89jycNkrrqnMmvPDMd56n1rQJ2Q6KocSLCMCXng= github.com/hashicorp/hcp-link v0.1.0 h1:F6F1cpADc+o5EBI5CbJn5RX4qdFSLpuA4fN69eeE5lQ= github.com/hashicorp/hcp-link v0.1.0/go.mod h1:BWVDuJDHrKJtWc5qI07bX5xlLjSgWq6kYLQUeG1g5dM= github.com/hashicorp/hcp-scada-provider v0.2.1 h1:yr+Uxini7SWTZ2t49d3Xi+6+X/rbsSFx8gq6WVcC91c= @@ -1834,17 +1114,15 @@ github.com/hashicorp/jsonapi v0.0.0-20210826224640-ee7dae0fb22d h1:9ARUJJ1VVynB1 github.com/hashicorp/jsonapi v0.0.0-20210826224640-ee7dae0fb22d/go.mod h1:Yog5+CPEM3c99L1CL2CFCYoSzgWm5vTU58idbRUaLik= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= github.com/hashicorp/mdns v1.0.4 h1:sY0CMhFmjIPDMlTB+HfymFHCaYLhgifZ0QhjaYKD/UQ= github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/memberlist v0.5.0 h1:EtYPN8DpAURiapus508I4n9CzHs2W+8NZGbmmR/prTM= github.com/hashicorp/memberlist v0.5.0/go.mod h1:yvyXLpo0QaGE59Y7hDTsTzDD25JYBZ4mHgHUZ8lrOI0= github.com/hashicorp/net-rpc-msgpackrpc/v2 v2.0.0 h1:kBpVVl1sl3MaSrs97e0+pDQhSrqJv9gVbSUrPpVfl1w= github.com/hashicorp/net-rpc-msgpackrpc/v2 v2.0.0/go.mod h1:6pdNz0vo0mF0GvhwDG56O3N18qBrAz/XRIcfINfTbwo= -github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf h1:cKXVf1UJqwdkGiTF3idqCOLApAql0310OSmJxeiaMWg= -github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf/go.mod h1:rb38DqjaaIfhJRiLeCAGgIt+wV7o78rB+liyFE3mVzE= +github.com/hashicorp/nomad/api v0.0.0-20220707195938-75f4c2237b28 h1:fo8EbQ6tc9hYqxik9CAdFMqy48TW8hh2I3znysPqf+0= +github.com/hashicorp/nomad/api v0.0.0-20220707195938-75f4c2237b28/go.mod h1:FslB+3eLbZgkuPWffqO1GeNzBFw1SuVqN2PXsMNe0Fg= github.com/hashicorp/raft v1.0.1/go.mod h1:DVSAWItjLjTOkVbSpWQ0j0kUADIvDaCtBxIcbNAQLkI= github.com/hashicorp/raft v1.1.0/go.mod h1:4Ak7FSPnuvmb0GV6vgIAJ4vYT4bek9bb6Q+7HVbyzqM= github.com/hashicorp/raft v1.1.2-0.20191002163536-9c6bd3e3eb17/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8= @@ -1858,98 +1136,89 @@ github.com/hashicorp/raft-boltdb/v2 v2.0.0-20210421194847-a7e34179d62c h1:oiKun9 github.com/hashicorp/raft-boltdb/v2 v2.0.0-20210421194847-a7e34179d62c/go.mod h1:kiPs9g148eLShc2TYagUAyKDnD+dH9U+CQKsXzlY9xo= github.com/hashicorp/raft-snapshot v1.0.4 h1:EuDuayAJPdiDmVk1ygTDnG2zDzrs0/6/yBuma1IYSow= github.com/hashicorp/raft-snapshot v1.0.4/go.mod h1:5sL9eUn72lH5DzsFIJ9jaysITbHksSSszImWSOTC8Ic= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= 
github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY= github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4= -github.com/hashicorp/vault-plugin-auth-alicloud v0.15.0 h1:R2SVwOeVLG5DXzUx42UWhjfFqS0Z9+ncfebPu+gO9VA= -github.com/hashicorp/vault-plugin-auth-alicloud v0.15.0/go.mod h1:YQXpa2s4rGYKm3Oa/Nkgh5SuGVfHFNEIUwDDYWyhloE= -github.com/hashicorp/vault-plugin-auth-azure v0.15.0 h1:OPK3rpRsWUQm/oo8l4N+YS7dka+lUHDT/qxTafSFPzY= -github.com/hashicorp/vault-plugin-auth-azure v0.15.0/go.mod h1:qRCibAYC0AV4s2+HxEwmLMPNLENK1kx2mrq9ldnGdkY= -github.com/hashicorp/vault-plugin-auth-centrify v0.15.1 h1:6StAr5tltpySNgyUwWC8czm9ZqkO7NIZfcRmxxtFwQ8= -github.com/hashicorp/vault-plugin-auth-centrify v0.15.1/go.mod h1:xXs4I5yLxbQ5VHcpvSxkRhShCTXd8Zyrni8qnFrfQ4Y= -github.com/hashicorp/vault-plugin-auth-cf v0.15.0 h1:zIVGlYXCRBY/ElucWdFC9xF27d2QMGMQPm9wSezGREI= -github.com/hashicorp/vault-plugin-auth-cf v0.15.0/go.mod h1:FEIjQkYmzno4MfU36MAjFUG9/JUWeMPxvBG5DRTMYVM= -github.com/hashicorp/vault-plugin-auth-gcp v0.16.0 h1:DA/ZDLCrUsbHS/7Xqkkw7l2SgbQE9rWEHLLWYTGu8rw= -github.com/hashicorp/vault-plugin-auth-gcp v0.16.0/go.mod h1:R0z/qdyxn0uq6hkKgux8KwenjV/n/CCaEz+qOF9GdPg= -github.com/hashicorp/vault-plugin-auth-jwt v0.16.0 h1:BUk03WDSGZuB+kEq3HTOQ7ecEH2Z1Idit42jfB5EnpE= -github.com/hashicorp/vault-plugin-auth-jwt v0.16.0/go.mod h1:Ve3r228afZOShwNvp+MGEKtm+ROskv10GG7bMXZb5OA= -github.com/hashicorp/vault-plugin-auth-kerberos v0.10.0 h1:YH2x9kIV0jKXk22tVkpydhmPeEgprC7IOfN8l0pjF6c= -github.com/hashicorp/vault-plugin-auth-kerberos v0.10.0/go.mod h1:I6ulXug4oxx77DFYjqI1kVl+72TgXEo3Oju4tTOVfU4= -github.com/hashicorp/vault-plugin-auth-kubernetes v0.16.0 h1:vuXNJvtMyoqQ01Sfwf2TNcJNkGcxP1vD3C7gpvuVkCU= -github.com/hashicorp/vault-plugin-auth-kubernetes v0.16.0/go.mod h1:onx9W/rDwENQkN+1yEnJvS51PVkkGAPOBXasne7lnnk= -github.com/hashicorp/vault-plugin-auth-oci v0.14.0 h1:B7uyigqgUAO3gebvi8mMmsq7l4QAG0bLEP6rAKyDVuw= -github.com/hashicorp/vault-plugin-auth-oci v0.14.0/go.mod h1:SYdTtQhzMxqOCbdC0E0UOrkc4eGXXcJmXXbe1MHVPtE= -github.com/hashicorp/vault-plugin-database-couchbase v0.9.2 h1:UWPWUADWUE08a3qeZixd/diIcNIm0NTqdPNTNbUljuQ= -github.com/hashicorp/vault-plugin-database-couchbase v0.9.2/go.mod h1:BvyZMbDEhvT4chbb7lgnL8xsVy9rF+hbDWuJ/eKkgpI= -github.com/hashicorp/vault-plugin-database-elasticsearch v0.13.2 h1:N81xJfdVjAo49dUu5Wo95C0fv5scpbYL9z4ykWeHxJg= -github.com/hashicorp/vault-plugin-database-elasticsearch v0.13.2/go.mod h1:P4cUbvtXgvfWZassvJzyXC4nIGRUO72ds9rE5WpQnuw= -github.com/hashicorp/vault-plugin-database-mongodbatlas v0.10.0 h1:fgsiuSq3AeFcYnbPkXOLSkKDrS2blaS/6MAmHEIAH28= -github.com/hashicorp/vault-plugin-database-mongodbatlas v0.10.0/go.mod h1:jH0OvjQ3Otg0HoOR5NugTqC3JA1KJ+J5OL0NdAzgSb4= -github.com/hashicorp/vault-plugin-database-redis v0.2.1 h1:E+UeZcpNtQO8nMfVebwE5ZS2sJpNjzbKwYJX1y8FFNk= -github.com/hashicorp/vault-plugin-database-redis v0.2.1/go.mod h1:T0i639Xnh2DY5ij8ofS83ZauBh8N0drKzqXYDrH87tM= -github.com/hashicorp/vault-plugin-database-redis-elasticache v0.2.1 h1:D8mdwkB6CyC37wkpdW9mgJNNrqral956bFoVj3AoQoE= -github.com/hashicorp/vault-plugin-database-redis-elasticache v0.2.1/go.mod h1:1RdJ0uxD8Mquzx9DBfoFKkmHgeZrPTN5nZHGyDrVCuY= -github.com/hashicorp/vault-plugin-database-snowflake v0.8.0 h1:Ec7gxxWIhxTmbKNXpmPgREra2go4H7QgDByIvtUwfFw= -github.com/hashicorp/vault-plugin-database-snowflake v0.8.0/go.mod h1:Qq2xNOGOOdHcX9Nr8H0CR4cNfanaGW3ANDrytpMjT1E= +github.com/hashicorp/vault-plugin-auth-alicloud v0.14.0 
h1:O6tNk0s/arubLUbLeCyaRs5xGo9VwmbQazISY/BfPK4= +github.com/hashicorp/vault-plugin-auth-alicloud v0.14.0/go.mod h1:We3fJplmALwK1VpjwrLuXr/4QCQHYMdnXLHmLUU6Ntg= +github.com/hashicorp/vault-plugin-auth-azure v0.13.0 h1:1j8fPQPumYg9oZG+MCFftglu/Edd6YGOuBvEWEkK0qQ= +github.com/hashicorp/vault-plugin-auth-azure v0.13.0/go.mod h1:Kg7oDhyyROtBEe8NNLTvpfDSnaxqgEyUvKqlNor/4I4= +github.com/hashicorp/vault-plugin-auth-centrify v0.14.0 h1:qZKnqU1tX1WS6+11+PskGMhlXl5LnfkVrDvJO4BRY7s= +github.com/hashicorp/vault-plugin-auth-centrify v0.14.0/go.mod h1:3fDbIVdwA/hkOVhwktKHDX5lo4DqIUUVbBdwQNNvxHw= +github.com/hashicorp/vault-plugin-auth-cf v0.14.0 h1:n/ojZukcH8YAOy/7JXITJn21byr1yxhujlR3DKlR3FY= +github.com/hashicorp/vault-plugin-auth-cf v0.14.0/go.mod h1:BdvPbWtUuBhTW1HrYXj2OGoeAIzWENYsKF378RoKmw4= +github.com/hashicorp/vault-plugin-auth-gcp v0.15.0 h1:EmfbQkYufMSFcbnOyn0f7bv2QYyyQyMx/D+qO04jfr0= +github.com/hashicorp/vault-plugin-auth-gcp v0.15.0/go.mod h1:GvtgteMxgza9I/QXNKFOAW6/FX0FmsAOzE0nz5126H4= +github.com/hashicorp/vault-plugin-auth-jwt v0.15.2 h1:8Pa9ir5lNTBfO9KT5jrqOeRDhgBsWuWtZ79vv4/AIy4= +github.com/hashicorp/vault-plugin-auth-jwt v0.15.2/go.mod h1:cMm0kZEcMkvwMSfHXeM1obYnjkmeoHOq0dWizzDCxDQ= +github.com/hashicorp/vault-plugin-auth-kerberos v0.9.0 h1:gdbrEwpPICDt8xQ7C595M+DXaojHvkA9/AhCKbvE+jY= +github.com/hashicorp/vault-plugin-auth-kerberos v0.9.0/go.mod h1:dyGS9eHADGMJC42tTr+XliO2Ntssv4bUOK1Je9IEMMo= +github.com/hashicorp/vault-plugin-auth-kubernetes v0.15.0 h1:uHsn1fJqxGxbWiiD2resMYZzPJWPwPMCGNCEziGHfwE= +github.com/hashicorp/vault-plugin-auth-kubernetes v0.15.0/go.mod h1:f9r9pDAyVLgVTzJmvCz2m0OSYjcdJivnLv+5YWVv3F8= +github.com/hashicorp/vault-plugin-auth-oci v0.13.1 h1:xThaZC9jzZoqqccfxTk11hfwgqwN3yEZ3kYOnY2v2Fs= +github.com/hashicorp/vault-plugin-auth-oci v0.13.1/go.mod h1:O426Kf4nUXfwq+o0HqQuqpZygm6SiOY6eEXyjrZweYA= +github.com/hashicorp/vault-plugin-database-couchbase v0.9.0 h1:hJOHJ9yZ9kt1/DuRaU5Sa339j3/QcPL4esT9JLQonYA= +github.com/hashicorp/vault-plugin-database-couchbase v0.9.0/go.mod h1:skmG6MgIG6fjIOlOEgVKOcNlr1PcgHPUb9q1YQ5+Q9k= +github.com/hashicorp/vault-plugin-database-elasticsearch v0.13.1 h1:nVO6F8V69E2fAQklh/Ds+EypVMutN4iIlt3sat9qW9M= +github.com/hashicorp/vault-plugin-database-elasticsearch v0.13.1/go.mod h1:wO8EPQs5bsBERD6MSQ+7Az+YJ4TFclCNxBo3r3VKeao= +github.com/hashicorp/vault-plugin-database-mongodbatlas v0.9.0 h1:wlWrg1z5Pyx+FTUCOzA9yh0FTI+pfA9tMrsFPFBcjjA= +github.com/hashicorp/vault-plugin-database-mongodbatlas v0.9.0/go.mod h1:4Ew6RNnA1NXtpLV0ijkwpE6pJE46G+suDKnTVMm+kXA= +github.com/hashicorp/vault-plugin-database-redis v0.2.0 h1:Fg1inevnDhj58+/y5SY1CihLftytG1D+3QqbUJbHYUM= +github.com/hashicorp/vault-plugin-database-redis v0.2.0/go.mod h1:hPj1vvjzsJ+g9PChP7iKqEJX7ttr03oz/RDEYsq8zZY= +github.com/hashicorp/vault-plugin-database-redis-elasticache v0.2.0 h1:dgTT7E8xj56hjktMxHNAgFpy7pchpoQ20cIhDsBcgz8= +github.com/hashicorp/vault-plugin-database-redis-elasticache v0.2.0/go.mod h1:h7H9VAI3xdoJ3VQ+wCyFZ5AOyMIQDS7ZhdjN8LGX3OU= +github.com/hashicorp/vault-plugin-database-snowflake v0.7.0 h1:Od5M2ddxRiHjDkHFto+aInru44/6Dy4jjrxyoKh3AW4= +github.com/hashicorp/vault-plugin-database-snowflake v0.7.0/go.mod h1:QJ8IL/Qlu4Me1KkL0OpaWO7aMFL0TNoSEKVB5F+lCiM= github.com/hashicorp/vault-plugin-mock v0.16.1 h1:5QQvSUHxDjEEbrd2REOeacqyJnCLPD51IQzy71hx8P0= github.com/hashicorp/vault-plugin-mock v0.16.1/go.mod h1:83G4JKlOwUtxVourn5euQfze3ZWyXcUiLj2wqrKSDIM= -github.com/hashicorp/vault-plugin-secrets-ad v0.16.0 h1:6RCpd2PbBvmi5xmxXhggE0Xv+/Gag896/NNZeMKH+8A= 
-github.com/hashicorp/vault-plugin-secrets-ad v0.16.0/go.mod h1:6IeXly3xi+dVodzFSx6aVZjdhd3syboPyhxr1/WMcyo= -github.com/hashicorp/vault-plugin-secrets-alicloud v0.15.0 h1:uVpcx2s3PwYXSOHmjA/Ai6+V0c3wgvSApELZez8b9mI= -github.com/hashicorp/vault-plugin-secrets-alicloud v0.15.0/go.mod h1:wMTkhPGxDa2PCdSBqd6A8SMcRrltu3NRbwX8m8W1MCU= -github.com/hashicorp/vault-plugin-secrets-azure v0.16.0 h1:4Y2LG2P6XUy4HLlObJtHiveJBQwZ4kazs0EpxDmAal0= -github.com/hashicorp/vault-plugin-secrets-azure v0.16.0/go.mod h1:tNzshPyCxkuOL4PLF3cybN/XaSlWgvfl6dwEbCARybY= -github.com/hashicorp/vault-plugin-secrets-gcp v0.16.0 h1:5ozLtt38Bw/DLt37dbccT8j56A+2T7CWFfYecKleGl4= -github.com/hashicorp/vault-plugin-secrets-gcp v0.16.0/go.mod h1:Ax9/ALmpzyjU8mcqHVYR9lwjcyazdmimrShDYeK9CHc= -github.com/hashicorp/vault-plugin-secrets-gcpkms v0.15.0 h1:CueteKXEuO52qGu1nUaDc/euSTSfQD9MONkXuvWdZQw= -github.com/hashicorp/vault-plugin-secrets-gcpkms v0.15.0/go.mod h1:a0Z2DVGd2SPPwLb8edXeHyer3CXei/Y0cb7EFkiFMfA= -github.com/hashicorp/vault-plugin-secrets-kubernetes v0.5.0 h1:g0W1ybHjO945jDtuDEFcqTINyW/s06wxZarE/7aLumc= -github.com/hashicorp/vault-plugin-secrets-kubernetes v0.5.0/go.mod h1:2wobeIypBESGQYmhv12vuAorCvfETHpBoMyrb+6QTmQ= -github.com/hashicorp/vault-plugin-secrets-kv v0.15.0 h1:S2d1t4m4ilDNJRdMUzNUimvyu/+ll8huq5QncVgYz+s= -github.com/hashicorp/vault-plugin-secrets-kv v0.15.0/go.mod h1:xu/eiT+BB2b2Gh/AZFJ1xCS8E7S29gOQcuh9VMxros8= -github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.10.0 h1:FB860wKclwLBvBHkQb5nq8bGMUAsuw0khrYT1RM0NR0= -github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.10.0/go.mod h1:6YPhFm57C3DvPueHEGdTLK94g3gZI/gdiRrSwO5Fym8= -github.com/hashicorp/vault-plugin-secrets-openldap v0.11.0 h1:8J8u7uWLifj3uF5tot9Qj74H8vEwPMNKN+XTLLgSmDw= -github.com/hashicorp/vault-plugin-secrets-openldap v0.11.0/go.mod h1:JVulYJNiG7s3pjwo9HAnq07ViWtGWkz2WAw8ytle+0w= -github.com/hashicorp/vault-plugin-secrets-terraform v0.7.1 h1:Icb3EDpNvb4ltnGff2Zrm3JVNDDdbbL2wdA2LouD2KQ= -github.com/hashicorp/vault-plugin-secrets-terraform v0.7.1/go.mod h1:JHHo1nWOgYPsbTqE/PVwkTKRkLSlPSqo9RBqZ7NLKB8= -github.com/hashicorp/vault-testing-stepwise v0.1.3 h1:GYvm98EB4nUKUntkBcLicnKsebeV89KPHmAGJUCPU/c= -github.com/hashicorp/vault-testing-stepwise v0.1.3/go.mod h1:Ym1T/kMM2sT6qgCIIJ3an7uaSWCJ8O7ohsWB9UiB5tI= +github.com/hashicorp/vault-plugin-secrets-ad v0.15.0 h1:4y/CtX4977uJXPWh5d70Raw5Mo+kCGDo9de2A6cOFso= +github.com/hashicorp/vault-plugin-secrets-ad v0.15.0/go.mod h1:+HVm4DDDc66fzFvL9FrgM/6ByVWR8eK3OA1050EjmOw= +github.com/hashicorp/vault-plugin-secrets-alicloud v0.14.1 h1:kFcdTltTe5HP0ILuB+YNw++Iy/PZMLv/i2FsmdXfGfM= +github.com/hashicorp/vault-plugin-secrets-alicloud v0.14.1/go.mod h1:sSjBgGh3o9cvMvpNC5K0DL+CndPL4fbsseR/pLiMlb8= +github.com/hashicorp/vault-plugin-secrets-azure v0.15.0 h1:R/3KLTOwvPIZenMrmeSIBWymKq5nYgA/bucXzBPyb3Q= +github.com/hashicorp/vault-plugin-secrets-azure v0.15.0/go.mod h1:frXRdkP8NFYLRIPLQsfIBKMaDrCmHJjv65N9QqAkN1w= +github.com/hashicorp/vault-plugin-secrets-gcp v0.15.0 h1:SfYIFmgFg/8p4fgLCV8YxxkI+iQN0c4gSjMJhg9vFJw= +github.com/hashicorp/vault-plugin-secrets-gcp v0.15.0/go.mod h1:/eOk7gJ5zvmOKgP5Ih7/5rZm5jOKDvGFpANIRqbr/Mc= +github.com/hashicorp/vault-plugin-secrets-gcpkms v0.14.0 h1:eUC5ltK+1bkc+SVMzAUq4tBeNrsDXyCuITH8jeajXcM= +github.com/hashicorp/vault-plugin-secrets-gcpkms v0.14.0/go.mod h1:86YCY86XuiQesV1jfjnV4icgoaxQdoUHONzDru+XQHA= +github.com/hashicorp/vault-plugin-secrets-kubernetes v0.3.0 h1:Joz9SBwjpEOGu+Ynv60JC3fAA4UuLJzu7NcrKm6wMMs= +github.com/hashicorp/vault-plugin-secrets-kubernetes 
v0.3.0/go.mod h1:NJeYBRgLVqjvkrVyZEe42oaqP3+xvVNMYdJoMWVoByU= +github.com/hashicorp/vault-plugin-secrets-kv v0.14.2 h1:13p50RIltQM/JH32uWZe9sAp16Uaj0zCLmVGPvS09qo= +github.com/hashicorp/vault-plugin-secrets-kv v0.14.2/go.mod h1:cAxt2o3BjRT5CbNLtgXuxTReaejvrgN/qk+no+DnwJ8= +github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.9.1 h1:WkW8fyHxEdz1wGSTxCnSCrzXvgLXqXr8Iqp7upa/s4E= +github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.9.1/go.mod h1:p96IECNtVwpvTq8RAw3dLlAYRWpG1n06XOoo0TkJnuk= +github.com/hashicorp/vault-plugin-secrets-openldap v0.10.1 h1:EN3/iEjPPmcpX9yihybQNHvewc+YoJw7aoKsio1WK5s= +github.com/hashicorp/vault-plugin-secrets-openldap v0.10.1/go.mod h1:sYuxnuNY2O59fy+LACtvgrqUO/r0cnhAYTMqLajD9FE= +github.com/hashicorp/vault-plugin-secrets-terraform v0.7.0 h1:jgJpVKhV0Eh6EjpUEIf7VYH2D6D0xW2Lry9/3PI8hy0= +github.com/hashicorp/vault-plugin-secrets-terraform v0.7.0/go.mod h1:GzYAJYytgbNNyT3S7rspz1cLE53E1oajFbEtaDUlVGU= +github.com/hashicorp/vault-testing-stepwise v0.1.1/go.mod h1:3vUYn6D0ZadvstNO3YQQlIcp7u1a19MdoOC0NQ0yaOE= +github.com/hashicorp/vault-testing-stepwise v0.1.3-0.20230203193428-3a789cb2c68f h1:7ASzAq/gPihP/HAd8RmS9c7LiJtDMnjkPbfWKhm/XiI= +github.com/hashicorp/vault-testing-stepwise v0.1.3-0.20230203193428-3a789cb2c68f/go.mod h1:8zCPiYcaasC/X/OR+NjbvaO48k1enp+WfhiJLJ/rkJ8= github.com/hashicorp/vault/vault/hcp_link/proto v0.0.0-20230201201504-b741fa893d77 h1:Y/+BtwxmRak3Us9jrByARvYW6uNeqZlEpMylIdXVIjY= github.com/hashicorp/vault/vault/hcp_link/proto v0.0.0-20230201201504-b741fa893d77/go.mod h1:a2crHoMWwY6aiL8GWT8hYj7vKD64uX0EdRPbnsHF5wU= github.com/hashicorp/vic v1.5.1-0.20190403131502-bbfe86ec9443 h1:O/pT5C1Q3mVXMyuqg7yuAWUg/jMZR1/0QTzTRdNR6Uw= github.com/hashicorp/vic v1.5.1-0.20190403131502-bbfe86ec9443/go.mod h1:bEpDU35nTu0ey1EXjwNwPjI9xErAsoOCmcMb9GKvyxo= github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 h1:xixZ2bWeofWV68J+x6AzmKuVM/JWCQwkWm6GW/MUR6I= github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= -github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= -github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= -github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.8/go.mod 
h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= -github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= -github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/influxdata/influxdb v1.7.6/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY= github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab h1:HqW4xhhynfjrtEiiSGcQUd6vrK23iMam1FO8rI7mwig= github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= -github.com/intel/goresctrl v0.2.0/go.mod h1:+CZdzouYFn5EsxgqAQTEzMfwKwuc0fVdMrT9FCCAVRQ= -github.com/intel/goresctrl v0.3.0/go.mod h1:fdz3mD85cmP9sHD8JUlrNWAxvwM86CrbmVXltEKd7zk= github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= -github.com/j-keck/arping v1.0.2/go.mod h1:aJbELhR92bSk7tp79AWM/ftfc90EfEi2bQJrbBFOsPw= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= @@ -2011,8 +1280,8 @@ github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVET github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= -github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8= -github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/gokrb5/v8 v8.4.3 h1:iTonLeSJOn7MVUtyMT+arAn5AKAPrkilzhGw8wE/Tq8= +github.com/jcmturner/gokrb5/v8 v8.4.3/go.mod h1:dqRwJGXznQrzw6cWmyo6kH+E7jksEQG/CyVWsJEsJO0= github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= github.com/jeffchao/backoff v0.0.0-20140404060208-9d7fd7aa17f2 h1:mex1izRBCD+7WjieGgRdy7e651vD/lvB1bD9vNE/3K4= @@ -2035,7 +1304,6 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfC github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= 
github.com/joyent/triton-go v0.0.0-20180628001255-830d2b111e62/go.mod h1:U+RSyWxWd04xTqnuOQxnai7XGS2PrPY2cfGoDKtMHjA= @@ -2057,36 +1325,26 @@ github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7 github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= -github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= -github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= -github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= +github.com/klauspost/compress v1.15.15 h1:EF27CXIuDsYJ6mmvtBRlEuB2UVOqHG1tAXgZ7yIO+lw= github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4= -github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= -github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= 
github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -2097,9 +1355,8 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= @@ -2110,17 +1367,12 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0 github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= -github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y= -github.com/lestrrat-go/blackmagic v1.0.0/go.mod h1:TNgH//0vYSs8VXDCfkZLgIrVTTXQELZffUV0tz3MtdQ= -github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E= -github.com/lestrrat-go/iter v1.0.1/go.mod h1:zIdgO1mRKhn8l9vrZJZz9TUMMFbQbLeTsbqPDrJ/OJc= -github.com/lestrrat-go/jwx v1.2.25/go.mod h1:zoNuZymNl5lgdcu6P7K6ie2QRll5HVfF4xwxBBK1NxY= -github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= github.com/lib/pq v0.0.0-20180327071824-d34b9ff171c2/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= @@ -2130,9 +1382,7 @@ github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= -github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.1/go.mod 
h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -2140,9 +1390,8 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= github.com/mailru/easyjson v0.7.1/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= -github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= @@ -2168,41 +1417,30 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= github.com/mattn/go-shellwords v1.0.6/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= -github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= -github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/matttproud/golang_protobuf_extensions v1.0.2/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY= -github.com/mediocregopher/radix/v4 v4.1.2 
h1:Pj7XnNK5WuzzFy63g98pnccainAePK+aZNQRvxSvj2I= -github.com/mediocregopher/radix/v4 v4.1.2/go.mod h1:ajchozX/6ELmydxWeWM6xCFHVpZ4+67LXHOTOVR0nCE= +github.com/mediocregopher/radix/v4 v4.1.1 h1:JkZBEp0y8pWGNZkmO3RR5oEO5huwd4zKKt4rh1C+P8s= +github.com/mediocregopher/radix/v4 v4.1.1/go.mod h1:ajchozX/6ELmydxWeWM6xCFHVpZ4+67LXHOTOVR0nCE= github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= github.com/michaelklishin/rabbit-hole/v2 v2.12.0 h1:946p6jOYFcVJdtBBX8MwXvuBkpPjwm1Nm2Qg8oX+uFk= github.com/michaelklishin/rabbit-hole/v2 v2.12.0/go.mod h1:AN/3zyz7d++OHf+4WUo/LR0+Q5nlPHMaXasIsG/mPY0= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.25/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg= -github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a h1:eU8j/ClY2Ty3qdHnn0TyW3ivFoPC/0F1gQZz8yTxbbE= github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a/go.mod h1:v8eSC2SMp9/7FTKUncp7fH9IwPfw+ysMObcEz5FWheQ= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= -github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= -github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM= github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= @@ -2213,7 +1451,6 @@ github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0 github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= @@ -2222,17 +1459,14 @@ github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJ github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= -github.com/mitchellh/gox v0.4.0/go.mod 
h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= github.com/mitchellh/hashstructure v1.1.0 h1:P6P1hdjqAAknpY/M1CGipelZgp+4y9ja9kmUZPXP+H0= github.com/mitchellh/hashstructure v1.1.0/go.mod h1:xUDAozZz0Wmdiufv0uyhnHkUTN6/6d8ulp4AwfLKrmA= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.3.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= @@ -2241,27 +1475,19 @@ github.com/mitchellh/pointerstructure v1.2.1/go.mod h1:BRAsLI5zgXmw97Lf6s25bs8oh github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mndrix/tap-go v0.0.0-20171203230836-629fa407e90b/go.mod h1:pzzDgJWZ34fGzaAZGFW22KVZDfyrYW+QABMrWnJBnSs= github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= -github.com/moby/patternmatcher v0.5.0 h1:YCZgJOeULcxLw1Q+sVR636pmS7sPEn1Qo2iAN6M7DBo= -github.com/moby/patternmatcher v0.5.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= -github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= +github.com/moby/sys/mount v0.2.0 h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM= +github.com/moby/sys/mount v0.2.0/go.mod h1:aAivFE2LB3W4bACsUXChRHQ0qKWsetY4Y9V7sxOougM= github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/moby/sys/mountinfo v0.5.0 h1:2Ks8/r6lopsxWi9m58nlwjaeSzUX9iiL1vj5qB/9ObI= github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= -github.com/moby/sys/mountinfo v0.6.2/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI= -github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= -github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= -github.com/moby/sys/signal v0.6.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= -github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= -github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs= github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod 
h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= -github.com/moby/term v0.0.0-20210610120745-9d4ed1856297/go.mod h1:vgPCkQMyxTZ7IDy8SXRufE172gr8+K/JE/7hHFxHW3A= +github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= -github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA= -github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -2270,17 +1496,13 @@ github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3Rllmb github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= -github.com/mongodb-forks/digest v1.0.4 h1:9FrGTc7MGAchgaQBcXBnEwUM/Oo8obW7OGWxnsSvZ64= -github.com/mongodb-forks/digest v1.0.4/go.mod h1:eHRfgovT+dvSFfltrOa27hy1oR/rcwyDdp5H1ZQxEMA= +github.com/mongodb-forks/digest v1.0.3 h1:ZUK1vyZnBiRMvET0O1SzmnBmv935CkcOTjhfR4zIQ2s= +github.com/mongodb-forks/digest v1.0.3/go.mod h1:eHRfgovT+dvSFfltrOa27hy1oR/rcwyDdp5H1ZQxEMA= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/montanaflynn/stats v0.7.0 h1:r3y12KyNxj/Sb/iOE46ws+3mS1+MZca1wlHQFPsY/JU= -github.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= -github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= -github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= @@ -2291,7 +1513,6 @@ github.com/natefinch/atomic v0.0.0-20150920032501-a62ce929ffcc h1:7xGrl4tTpBQu5Z github.com/natefinch/atomic v0.0.0-20150920032501-a62ce929ffcc/go.mod h1:1rLVY/DWf3U6vSZgH16S7pymfrhK2lcUlXjgGglw/lY= github.com/ncw/swift v1.0.47 h1:4DQRPj35Y41WogBxyhOXlrI37nzGlyEcsforeudyYPQ= github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= -github.com/networkplumbing/go-nft v0.2.0/go.mod h1:HnnM+tYvlGAsMU7yoYwXEVLLiDW9gdMmb5HoGcwpuQs= github.com/nicolai86/scaleway-sdk v1.10.2-0.20180628010248-798f60e20bb2 
h1:BQ1HW7hr4IVovMwWg0E0PYcyW8CzqDcVmaew9cujU4s= github.com/nicolai86/scaleway-sdk v1.10.2-0.20180628010248-798f60e20bb2/go.mod h1:TLb2Sg7HQcgGdloNxkrmtgDNR9uVYF3lfdFIN4Ro6Sk= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= @@ -2309,7 +1530,6 @@ github.com/okta/okta-sdk-golang/v2 v2.12.1 h1:U+smE7trkHSZO8Mval3Ow85dbxawO+pMAr github.com/okta/okta-sdk-golang/v2 v2.12.1/go.mod h1:KRoAArk1H216oiRnQT77UN6JAhBOnOWkK27yA1SM7FQ= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/olekukonko/tablewriter v0.0.0-20180130162743-b8a9be070da4/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -2319,19 +1539,10 @@ github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+ github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.13.0/go.mod h1:+REjRxOmWfHCjfv9TTWB1jD1Frx4XydAD3zm1lskyM0= -github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU= -github.com/onsi/ginkgo/v2 v2.1.6/go.mod h1:MEH45j8TBi6u9BMogfbp0stKC5cdGjumZj5Y7AG4VIk= -github.com/onsi/ginkgo/v2 v2.3.0/go.mod h1:Eew0uilEqZmIEZr8JrvYlvOM7Rr6xzTmMV8AyFNU9d0= -github.com/onsi/ginkgo/v2 v2.4.0/go.mod h1:iHkDK1fKGcBoEHT5W7YBq4RFWaQulw+caOMkAt4OrFo= -github.com/onsi/ginkgo/v2 v2.5.0/go.mod h1:Luc4sArBICYCS8THh8v3i3i5CuSZO+RaQRaJoeNwomw= -github.com/onsi/ginkgo/v2 v2.6.1/go.mod h1:yjiuMwPokqY1XauOgju45q3sJt6VzQ/Fict1LFVcsAo= -github.com/onsi/ginkgo/v2 v2.9.1 h1:zie5Ly042PD3bsCvsSOPvRnFwyo3rKe64TJlD6nu0mk= +github.com/onsi/ginkgo/v2 v2.1.6 h1:Fx2POJZfKRQcM1pH49qSZiYeu319wji004qX+GDovrU= github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= @@ -2343,17 +1554,7 @@ github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1y github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0= github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.19.0/go.mod 
h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/onsi/gomega v1.20.1/go.mod h1:DtrZpjmvpn2mPm4YWQa0/ALMDj9v4YxLgojwPeREyVo= -github.com/onsi/gomega v1.21.1/go.mod h1:iYAIXgPSaDHak0LCMA+AWBpIKBr8WZicMxnE8luStNc= -github.com/onsi/gomega v1.22.1/go.mod h1:x6n7VNe4hw0vkyYUM4mjIXx3JbLiPaBPNgB7PRQ1tuM= -github.com/onsi/gomega v1.23.0/go.mod h1:Z/NWtiqwBrwUt4/2loMmHL63EDLnYHmVbuBpDr2vQAg= -github.com/onsi/gomega v1.24.0/go.mod h1:Z/NWtiqwBrwUt4/2loMmHL63EDLnYHmVbuBpDr2vQAg= -github.com/onsi/gomega v1.24.1/go.mod h1:3AOiACssS3/MajrniINInwbfOOtfZvplPzuRSmvt1jM= -github.com/onsi/gomega v1.24.2/go.mod h1:gs3J10IS7Z7r7eXRoNJIrNqU4ToQukCJhFtKrWgHWnk= -github.com/onsi/gomega v1.27.4 h1:Z2AnStgsdSayCMDiCU42qIz+HLqEPcgiOCXjAU/w+8E= -github.com/open-policy-agent/opa v0.42.2/go.mod h1:MrmoTi/BsKWT58kXlVayBb+rYVeaMwuBm3nYAN3923s= +github.com/onsi/gomega v1.20.1 h1:PA/3qinGoukvymdIDV8pii6tiZgC8kbmJO6Z5+b002Q= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -2362,43 +1563,29 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8 github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.2-0.20211117181255-693428a734f5/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.1.0-rc2/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ= -github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b h1:YWuSjZCQAPM8UUBLkYUk1e+rZcvWHJmFb6i6rM44Xs8= -github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ= github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0= github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= -github.com/opencontainers/runc v1.1.0/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= -github.com/opencontainers/runc v1.1.2/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= +github.com/opencontainers/runc v1.1.4 h1:nRCz/8sKg6K6jgYAFLDlXzPeITBZJyX28DBVhWD+5dg= github.com/opencontainers/runc v1.1.4/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= -github.com/opencontainers/runc v1.1.5/go.mod 
h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= -github.com/opencontainers/runc v1.1.6 h1:XbhB8IfG/EsnhNvZtNdLB0GBw92GYEFvKlhaJk9jUgA= -github.com/opencontainers/runc v1.1.6/go.mod h1:CbUumNnWCuTGFukNXahoo/RFBZvDAgRh/smNYNOhA50= github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.3-0.20200929063507-e6143ca7d51d/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.0.3-0.20220825212826-86290f6a00fb/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.1.0-rc.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs= -github.com/opencontainers/runtime-tools v0.9.0/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs= -github.com/opencontainers/runtime-tools v0.9.1-0.20221107090550-2e043c6bd626/go.mod h1:BRHJJd0E+cx42OybVYSgUvZmU0B8P9gZuRXlZUP7TKI= github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE= github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= -github.com/opencontainers/selinux v1.9.1/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= -github.com/opencontainers/selinux v1.10.1/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= -github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/openlyinc/pointy v1.1.2 h1:LywVV2BWC5Sp5v7FoP4bUD+2Yn5k0VNeRbU5vq9jUMY= +github.com/openlyinc/pointy v1.1.2/go.mod h1:w2Sytx+0FVuMKn37xpXIAyBNhFNBIJGR/v2m7ik1WtM= github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b h1:FfH+VrHHk6Lxt9HdVS0PXzSXFyS2NbZKXv33FYPol0A= github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b/go.mod h1:AC62GU6hc0BrNm+9RK9VSiwa/EUe1bkIeFORAMcHvJU= github.com/oracle/oci-go-sdk v24.3.0+incompatible h1:x4mcfb4agelf1O4/1/auGlZ1lr97jXRSSN5MxTgG/zU= @@ -2407,8 +1594,9 @@ github.com/oracle/oci-go-sdk/v60 v60.0.0 h1:EJAWjEi4SY5Raha6iUzq4LTQ0uM5YFw/wat/ github.com/oracle/oci-go-sdk/v60 v60.0.0/go.mod h1:krz+2gkSzlSL/L4PvP0Z9pZpag9HYLNtsMd1PmxlA2w= github.com/ory/dockertest v3.3.5+incompatible h1:iLLK6SQwIhcbrG783Dghaaa3WPzGc+4Emza6EbVUUGA= github.com/ory/dockertest v3.3.5+incompatible/go.mod h1:1vX4m9wsvi00u5bseYwXaSnhNrne+V0E6LAcBILJdPs= -github.com/ory/dockertest/v3 v3.10.0 h1:4K3z2VMe8Woe++invjaTB7VRyQXQy5UY+loujO4aNE4= -github.com/ory/dockertest/v3 v3.10.0/go.mod h1:nr57ZbRWMqfsdGdFNLHz5jjNdDb7VVFnzAeW1n5N1Lg= +github.com/ory/dockertest/v3 v3.8.0/go.mod h1:9zPATATlWQru+ynXP+DytBQrsXV7Tmlx7K86H6fQaDo= 
+github.com/ory/dockertest/v3 v3.9.1 h1:v4dkG+dlu76goxMiTT2j8zV7s4oPPEppKT8K8p2f1kY= +github.com/ory/dockertest/v3 v3.9.1/go.mod h1:42Ir9hmvaAPm0Mgibk6mBPi7SFvTXxEcnztDYOJ//uM= github.com/oxtoacart/bpool v0.0.0-20150712133111-4e1c5567d7c2 h1:CXwSGu/LYmbjEab5aMCs5usQRVBGThelUKBNnoSOuso= github.com/oxtoacart/bpool v0.0.0-20150712133111-4e1c5567d7c2/go.mod h1:L3UMQOThbttwfYRNFOWLLVXMhk5Lkio4GGOtw5UrxS0= github.com/packethost/packngo v0.1.1-0.20180711074735-b9cb5096f54c h1:vwpFWvAO8DeIZfFeqASzZfsxuWPno9ncAebBEP0N3uE= @@ -2423,30 +1611,22 @@ github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/9 github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= -github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= -github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= -github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= -github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/pierrec/lz4 v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM= github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4= github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pierrec/lz4/v4 v4.1.16/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= -github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pires/go-proxyproto v0.6.1 h1:EBupykFmo22SDjv4fQVQd2J9NOoLPmyZA/15ldOGkPw= github.com/pires/go-proxyproto v0.6.1/go.mod h1:Odh9VFOZJCf9G8cLW5o435Xf1J95Jw9Gw5rnCjcwzAY= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/pkg/browser v0.0.0-20210706143420-7d21f8c997e2/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= 
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -2454,7 +1634,6 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -2468,7 +1647,6 @@ github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8 github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= github.com/pquerna/otp v1.2.1-0.20191009055518-468c2dd2b58d h1:PinQItctnaL2LtkaSM678+ZLLy5TajwOeXzWvYC7tII= github.com/pquerna/otp v1.2.1-0.20191009055518-468c2dd2b58d/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg= -github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= @@ -2478,19 +1656,14 @@ github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQ github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.11.1 h1:+4eQaD7vAZ6DsfsxB15hbE0odUjGI5ARs9yskGu1v4s= github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.12.2/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= -github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw= -github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 
h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= -github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= @@ -2499,11 +1672,8 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8 github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0 h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.30.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= -github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= @@ -2515,15 +1685,11 @@ github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDa github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= -github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/rboyer/safeio v0.2.1 h1:05xhhdRNAdS3apYm7JRjOqngf4xruaW959jmRxGDuSU= github.com/rboyer/safeio v0.2.1/go.mod h1:Cq/cEPK+YXFn622lsQ0K4KsPZSPtaptHHEldsy7Fmig= -github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/renier/xmlrpc v0.0.0-20170708154548-ce4a1a486c03 h1:Wdi9nwnhFNAlseAOekn6B5G/+GMtks9UKbvRU/CMM/o= github.com/renier/xmlrpc v0.0.0-20170708154548-ce4a1a486c03/go.mod h1:gRAiPF5C5Nd0eyyRdqIu9qTiFSoZzpTq727b5B8fkkU= github.com/rogpeppe/fastuuid 
v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= @@ -2533,53 +1699,39 @@ github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= -github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/zerolog v1.4.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= -github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= -github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/columnize v2.1.0+incompatible h1:j1Wcmh8OrK4Q7GXY+V7SVSY8nUWQxHW5TkBe7YUl+2s= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= -github.com/safchain/ethtool v0.0.0-20210803160452-9aa261dae9b1/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= -github.com/safchain/ethtool v0.2.0/go.mod h1:WkKB1DnNtvsMlDmQ50sgwowDJV/hGbJSOvJoEXs1AJQ= github.com/sasha-s/go-deadlock v0.2.0 h1:lMqc+fUb7RrFS3gQLtoQsJ7/6TV/pAIFvBsqX73DK8Y= github.com/sasha-s/go-deadlock v0.2.0/go.mod h1:StQn567HiB1fF2yJ44N9au7wOhrPS3iZqiDbRupzT10= github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U= github.com/sean-/conswriter v0.0.0-20180208195008-f5ae3917a627/go.mod h1:7zjs06qF79/FKAJpBvFx3P8Ww4UTIMAe+lpNXDHziac= github.com/sean-/pager v0.0.0-20180208200047-666be9bf53b5/go.mod h1:BeybITEsBEg6qbIiqJ6/Bqeq25bCLbL7YFmpaFfJDuM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 
h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= -github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sethvargo/go-limiter v0.7.1 h1:wWNhTj0pxjyJ7wuJHpRJpYwJn+bUnjYfw2a85eu5w9U= github.com/sethvargo/go-limiter v0.7.1/go.mod h1:C0kbSFbiriE5k2FFOe18M1YZbAR2Fiwf72uGu0CXCcU= github.com/shirou/gopsutil/v3 v3.22.6 h1:FnHOFOh+cYAM0C30P+zysPISzlknLC5Z1G4EAElznfQ= github.com/shirou/gopsutil/v3 v3.22.6/go.mod h1:EdIubSnZhbAvBS1yJ7Xi+AShB/hxwLHOMz4MCYz7yMs= -github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= -github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= @@ -2599,34 +1751,24 @@ github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1 github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/snowflakedb/gosnowflake v1.6.18 h1:mm4KYvp3LWGHIuACwX/tHv9qDs2NdLDXuK0Rep+vfJc= -github.com/snowflakedb/gosnowflake v1.6.18/go.mod h1:BhNDWNSUY+t4T8GBuOg3ckWC4v5hhGlLovqGcF8Rkac= +github.com/snowflakedb/gosnowflake v1.6.3 h1:EJDdDi74YbYt1ty164ge3fMZ0eVZ6KA7b1zmAa/wnRo= +github.com/snowflakedb/gosnowflake v1.6.3/go.mod h1:6hLajn6yxuJ4xUHZegMekpq9rnQbGJ7TMwXjgTmA6lg= github.com/softlayer/softlayer-go v0.0.0-20180806151055-260589d94c7d h1:bVQRCxQvfjNUeRqaY/uT0tFuvuFY0ulgnczuR684Xic= github.com/softlayer/softlayer-go v0.0.0-20180806151055-260589d94c7d/go.mod h1:Cw4GTlQccdRGSEf6KiMju767x0NEHE0YIVPJSaXjlsw= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0= github.com/sony/gobreaker v0.4.2-0.20210216022020-dd874f9dd33b h1:br+bPNZsJWKicw/5rALEo67QHs5weyD5tf8WST+4sJ0= github.com/sony/gobreaker v0.4.2-0.20210216022020-dd874f9dd33b/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod 
h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= -github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo= -github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= -github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= -github.com/spf13/cobra v1.6.0/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= @@ -2636,7 +1778,6 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= -github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/streadway/amqp v1.0.0 h1:kuuDrUJFZL1QYL9hUNuCxNObNzB0bV/ZG5jV3RWAQgo= @@ -2645,7 +1786,6 @@ github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= @@ -2661,29 +1801,24 @@ github.com/stretchr/testify v1.7.2/go.mod 
h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1F github.com/stretchr/testify v1.7.5/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= -github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= github.com/tencentcloud/tencentcloud-sdk-go v1.0.162 h1:8fDzz4GuVg4skjY2B0nMN7h6uN61EDVkuLyI2+qGHhI= github.com/tencentcloud/tencentcloud-sdk-go v1.0.162/go.mod h1:asUz5BPXxgoPGaRgZaVm1iGcUAuHyYUo1nXqKa83cvI= github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tilinna/clock v1.0.2 h1:6BO2tyAC9JbPExKH/z9zl44FLu1lImh3nDNKA0kgrkI= github.com/tilinna/clock v1.0.2/go.mod h1:ZsP7BcY7sEEz7ktc0IVy8Us6boDrK8VradlKRUGfOao= -github.com/tilinna/clock v1.1.0 h1:6IQQQCo6KoBxVudv6gwtY8o4eDfhHo8ojA5dP0MfhSs= -github.com/tilinna/clock v1.1.0/go.mod h1:ZsP7BcY7sEEz7ktc0IVy8Us6boDrK8VradlKRUGfOao= github.com/tklauser/go-sysconf v0.3.10 h1:IJ1AZGZRWbY8T5Vfk04D9WOA5WSejdflXxP03OUqALw= github.com/tklauser/go-sysconf v0.3.10/go.mod h1:C8XykCvCb+Gn0oNCWPIlcb0RuglQTYaQ2hGm7jmxEFk= github.com/tklauser/numcpus v0.4.0 h1:E53Dm1HjH1/R2/aoCtXtPgzmElmn51aOkhCFSuZq//o= github.com/tklauser/numcpus v0.4.0/go.mod h1:1+UI3pD8NW14VMwdgJNJ1ESk2UnwhAnz5hMwiKKqXCQ= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/tv42/httpunix v0.0.0-20191220191345-2ba4b9c3382c h1:u6SKchux2yDvFQnDHS3lPnIRmfVJ5Sxy3ao2SIdysLQ= github.com/tv42/httpunix v0.0.0-20191220191345-2ba4b9c3382c/go.mod h1:hzIxponao9Kjc7aWznkXaL4U4TWaDSs8zcsY4Ka08nM= @@ -2702,36 +1837,26 @@ github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oW github.com/ulikunitz/xz v0.5.10 h1:t92gobL9l3HE202wg3rlk19F6X+JOxl9BBrCCMYEYd8= github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.19.1/go.mod 
h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli v1.22.4/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN30b8= -github.com/vbatts/tar-split v0.11.2/go.mod h1:vV3ZuO2yWSVsz+pfFzDG/upWH1JhjOiEaWq6kXyQ3VI= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= -github.com/vektah/gqlparser/v2 v2.4.5/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= -github.com/veraison/go-cose v1.0.0-rc.1/go.mod h1:7ziE85vSq4ScFTg6wyoMXjucIGOf4JkFEZi/an96Ct4= github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= -github.com/vishvananda/netlink v1.1.1-0.20210330154013-f5de75959ad5/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= -github.com/vishvananda/netlink v1.2.1-beta.2/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI= github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= -github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= github.com/vmware/govmomi v0.18.0 h1:f7QxSmP7meCtoAmiKZogvVbLInT+CZx6Px6K5rYsJZo= github.com/vmware/govmomi v0.18.0/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU= github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= -github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs= -github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= +github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2 h1:6iq84/ryjjeRmMJwxutI51F2GIPlP5BfTvXHeYjyhBc= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= 
@@ -2746,7 +1871,6 @@ github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofm github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yashtewari/glob-intersection v0.1.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -2764,42 +1888,24 @@ github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQ github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= -github.com/zclconf/go-cty v1.12.1 h1:PcupnljUm9EIvbgSHQnHhUr3fO6oFmkOrvs2BAFNXXY= -github.com/zclconf/go-cty v1.12.1/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA= -github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/bbolt v1.3.6 h1:/ecaJf0sk1l4l6V4awd65v2C3ILy7MSj+s/x1ADCIMU= go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4= -go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ= -go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= +go.etcd.io/etcd/api/v3 v3.5.0 h1:GsV3S+OfZEOCNXdtNkBSR7kgLobAa/SO6tCxRa0GAYw= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= -go.etcd.io/etcd/api/v3 v3.5.7 h1:sbcmosSVesNrWOJ58ZQFitHMdncusIifYcrBfwrlJSY= -go.etcd.io/etcd/api/v3 v3.5.7/go.mod h1:9qew1gCdDDLu+VwmeG+iFpL+QlpHTo7iubavdVDgCAA= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/pkg/v3 v3.5.5/go.mod h1:ggrwbk069qxpKPq8/FKkQ3Xq9y39kbFR4LnKszpRXeQ= -go.etcd.io/etcd/client/pkg/v3 v3.5.7 h1:y3kf5Gbp4e4q7egZdn5T7W9TSHUvkClN6u+Rq9mEOmg= -go.etcd.io/etcd/client/pkg/v3 v3.5.7/go.mod h1:o0Abi1MK86iad3YrWhgUsbGx1pmTS+hrORWc2CamuhY= +go.etcd.io/etcd/client/pkg/v3 v3.0.0-20210928084031-3df272774672 h1:19vOZe7geDEympjWIVidGi6/psR5Y+aaKnF17PSpdXA= +go.etcd.io/etcd/client/pkg/v3 v3.0.0-20210928084031-3df272774672/go.mod h1:wSVAyLiSU4JOBlqGr29lZeKbllk31oCAXAdTa6MioWQ= +go.etcd.io/etcd/client/v2 v2.305.0 h1:ftQ0nOOHMcbMS3KIaDQ0g5Qcd6bhaBrQT6b89DfwLTs= 
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.etcd.io/etcd/client/v2 v2.305.5 h1:DktRP60//JJpnPC0VBymAN/7V71GHMdjDCBt4ZPXDjI= -go.etcd.io/etcd/client/v2 v2.305.5/go.mod h1:zQjKllfqfBVyVStbt4FaosoX2iYd8fV/GRy/PbowgP4= +go.etcd.io/etcd/client/v3 v3.5.0 h1:62Eh0XOro+rDwkrypAGDfgmNh5Joq+z+W9HZdlXMzek= go.etcd.io/etcd/client/v3 v3.5.0/go.mod h1:AIKXXVX/DQXtfTEqBryiLTUXwON+GuvO6Z7lLS/oTh0= -go.etcd.io/etcd/client/v3 v3.5.5/go.mod h1:aApjR4WGlSumpnJ2kloS75h6aHUmAyaPLjHMxpc7E7c= -go.etcd.io/etcd/client/v3 v3.5.7 h1:u/OhpiuCgYY8awOHlhIhmGIGpxfBU/GZBUP3m/3/Iz4= -go.etcd.io/etcd/client/v3 v3.5.7/go.mod h1:sOWmj9DZUMyAngS7QQwCyAXXAL6WhgTOPLNS/NabQgw= -go.etcd.io/etcd/pkg/v3 v3.5.0/go.mod h1:UzJGatBQ1lXChBkQF0AuAtkRQMYnHubxAEYIrC3MSsE= -go.etcd.io/etcd/pkg/v3 v3.5.5/go.mod h1:6ksYFxttiUGzC2uxyqiyOEvhAiD0tuIqSZkX3TyPdaE= -go.etcd.io/etcd/raft/v3 v3.5.0/go.mod h1:UFOHSIvO/nKwd4lhkwabrTD3cqW5yVyYYf/KlD00Szc= -go.etcd.io/etcd/raft/v3 v3.5.5/go.mod h1:76TA48q03g1y1VpTue92jZLr9lIHKUNcYdZOOGyx8rI= -go.etcd.io/etcd/server/v3 v3.5.0/go.mod h1:3Ah5ruV+M+7RZr0+Y/5mNLwC+eQlni+mQmOVdCRJoS4= -go.etcd.io/etcd/server/v3 v3.5.5/go.mod h1:rZ95vDw/jrvsbj9XpTqPrTAB9/kzchVdhRirySPkUBc= -go.etcd.io/gofail v0.1.0/go.mod h1:VZBCXYGZhHAinaBiiqYvuDynvahNsAyLFwB3kEHKz1M= -go.mongodb.org/atlas v0.28.0 h1:CelAXtmiM36tdifSDwWdDH1nNbdvq0M2XfUR8208JxA= -go.mongodb.org/atlas v0.28.0/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg= +go.mongodb.org/atlas v0.13.0/go.mod h1:wVCnHcm/7/IfTjEB6K8K35PLG70yGz8BdkRwX0oK9/M= +go.mongodb.org/atlas v0.15.0 h1:YyOBdBIuI//krRITf4r7PSirJ3YDNNUfNmapxwSyDow= +go.mongodb.org/atlas v0.15.0/go.mod h1:lQhRHIxc6jQHEK3/q9WLu/SdBkPj2fQYhjLGUF6Z3U8= go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.mongodb.org/mongo-driver v1.3.0/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= @@ -2807,95 +1913,36 @@ go.mongodb.org/mongo-driver v1.3.4/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS go.mongodb.org/mongo-driver v1.4.3/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.11.6 h1:XM7G6PjiGAO5betLF13BIa5TlLUUE3uJ/2Ox3Lz1K+o= -go.mongodb.org/mongo-driver v1.11.6/go.mod h1:G9TgswdsWjX4tmDA5zfs2+6AEPpYJwqblyjsfuh8oXY= +go.mongodb.org/mongo-driver v1.7.3 h1:G4l/eYY9VrQAK/AUgkV0koQKzQnyddnWxrd/Etf0jIs= +go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= 
-go.opentelemetry.io/contrib v0.20.0/go.mod h1:G/EtFaa6qaN7+LxqfIAT3GiZa7Wv5DTBUzl5H4LY0Kc= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.20.0/go.mod h1:oVGt1LRbBOBq1A5BQLlUg9UaU/54aiHw8cgjV3aWZ/E= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.25.0/go.mod h1:E5NNboN0UqSAki0Atn9kVwaN7I+l25gGxDqBueo/74E= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.28.0/go.mod h1:vEhqr0m4eTc+DWxfsXoXue2GBgV2uUwVznkGIHW/e5w= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.35.0/go.mod h1:h8TWwRAhQpOd0aM5nYsRD8+flnkj+526GEIVlarH7eY= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.40.0/go.mod h1:UMklln0+MRhZC4e3PwmN3pCtq4DyIadWw4yikh6bNrw= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.20.0/go.mod h1:2AboqHi0CiIZU0qwhtUfCYD1GeUzvvIXWNkhDt7ZMG4= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.32.0/go.mod h1:5eCOqeGphOyz6TsY3ZDNjE33SM/TFAK3RGuCL2naTgY= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.35.0/go.mod h1:9NiG9I2aHTKkcxqCILhjtyNA1QEiCjdBACv4IvrFQ+c= -go.opentelemetry.io/otel v0.20.0/go.mod h1:Y3ugLH2oa81t5QO+Lty+zXf8zC9L26ax4Nzoxm/dooo= -go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= -go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= -go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= -go.opentelemetry.io/otel v1.8.0/go.mod h1:2pkj+iMj0o03Y+cW6/m8Y4WkRdYN3AvCXCnzRMp9yvM= -go.opentelemetry.io/otel v1.10.0/go.mod h1:NbvWjCthWHKBEUMpf0/v8ZRZlni86PpGFEMA9pnQSnQ= -go.opentelemetry.io/otel v1.14.0 h1:/79Huy8wbf5DnIPhemGB+zEPVwnN6fuQybr/SRXa6hM= -go.opentelemetry.io/otel v1.14.0/go.mod h1:o4buv+dJzx8rohcUeRmWUZhqupFvzWis188WlggnNeU= -go.opentelemetry.io/otel/exporters/otlp v0.20.0/go.mod h1:YIieizyaN77rtLJra0buKiNBOm9XQfkPEKBeuhoMwAM= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.3.0/go.mod h1:VpP4/RMn8bv8gNo9uK7/IMY4mtWLELsS+JIP0inH0h4= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.7.0/go.mod h1:M1hVZHNxcbkAlcvrOMlpQ4YOO3Awf+4N2dxkZL3xm04= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.10.0/go.mod h1:78XhIg8Ht9vR4tbLNUhXsiOnE2HOuSeKAiAcoVQEpOY= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.14.0/go.mod h1:UFG7EBMRdXyFstOwH028U0sVf+AvukSGhF0g8+dmNG8= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.0.1/go.mod h1:Kv8liBeVNFkkkbilbgWRpV+wWuu+H5xdOT6HAgd30iw= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.3.0/go.mod h1:hO1KLR7jcKaDDKDkvI9dP/FIhpmna5lkqPUQdEjFAM8= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.7.0/go.mod h1:ceUgdyfNv4h4gLxHR0WNfDiiVmZFodZhZSbOLhpxqXE= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.10.0/go.mod h1:Krqnjl22jUJ0HgMzw5eveuCvFDXY4nSYb4F8t5gdrag= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.14.0/go.mod h1:HrbCVv40OOLTABmOn1ZWty6CHXkU8DK/Urc43tHug70= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.0.1/go.mod h1:xOvWoTOrQjxjW61xtOmD/WKGRYb/P4NzRo3bs65U6Rk= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.3.0/go.mod h1:keUU7UfnwWTWpJ+FWnyqmogPa82nuU5VUANFq49hlMY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.7.0/go.mod h1:E+/KKhwOSw8yoPxSSuUHG6vKppkvhN+S1Jc7Nib3k3o= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.10.0/go.mod h1:OfUCyyIiDvNXHWpcWgbF+MWvqPZiNa3YDEnivcnYsV0= 
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.14.0/go.mod h1:5w41DY6S9gZrbjuq6Y+753e96WfPha5IcsOSZTtullM= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.3.0/go.mod h1:QNX1aly8ehqqX1LEa6YniTU7VY9I6R3X/oPxhGdTceE= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.14.0/go.mod h1:+N7zNjIJv4K+DeX67XXET0P+eIciESgaFDBqh+ZJFS4= -go.opentelemetry.io/otel/metric v0.20.0/go.mod h1:598I5tYlH1vzBjn+BTuhzTCSb/9debfNp6R3s7Pr1eU= -go.opentelemetry.io/otel/metric v0.30.0/go.mod h1:/ShZ7+TS4dHzDFmfi1kSXMhMVubNoP0oIaBp70J6UXU= -go.opentelemetry.io/otel/metric v0.31.0/go.mod h1:ohmwj9KTSIeBnDBm/ZwH2PSZxZzoOaG2xZeekTRzL5A= -go.opentelemetry.io/otel/metric v0.37.0/go.mod h1:DmdaHfGt54iV6UKxsV9slj2bBRJcKC1B1uvDLIioc1s= -go.opentelemetry.io/otel/oteltest v0.20.0/go.mod h1:L7bgKf9ZB7qCwT9Up7i9/pn0PWIa9FqQ2IQ8LoxiGnw= -go.opentelemetry.io/otel/sdk v0.20.0/go.mod h1:g/IcepuwNsoiX5Byy2nNV0ySUF1em498m7hBWC279Yc= -go.opentelemetry.io/otel/sdk v1.0.1/go.mod h1:HrdXne+BiwsOHYYkBE5ysIcv2bvdZstxzmCQhxTcZkI= -go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= -go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU= -go.opentelemetry.io/otel/sdk v1.10.0/go.mod h1:vO06iKzD5baltJz1zarxMCNHFpUlUiOy4s65ECtn6kE= -go.opentelemetry.io/otel/sdk v1.14.0 h1:PDCppFRDq8A1jL9v6KMI6dYesaq+DFcDZvjsoGvxGzY= -go.opentelemetry.io/otel/sdk v1.14.0/go.mod h1:bwIC5TjrNG6QDCHNWvW4HLHtUQ4I+VQDsnjhvyZCALM= -go.opentelemetry.io/otel/sdk/export/metric v0.20.0/go.mod h1:h7RBNMsDJ5pmI1zExLi+bJK+Dr8NQCh0qGhm1KDnNlE= -go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4/0TjTXukfxjzSTpHE= -go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw= -go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= -go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= -go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= -go.opentelemetry.io/otel/trace v1.8.0/go.mod h1:0Bt3PXY8w+3pheS3hQUt+wow8b1ojPaTBoTCh2zIFI4= -go.opentelemetry.io/otel/trace v1.10.0/go.mod h1:Sij3YYczqAdz+EhmGhE6TpTxUO5/F/AzrK+kxfGqySM= -go.opentelemetry.io/otel/trace v1.14.0 h1:wp2Mmvj41tDsyAJXiWDWpfNsOiIyd38fy85pyKcFq/M= -go.opentelemetry.io/otel/trace v1.14.0/go.mod h1:8avnQLK+CG77yNLUae4ea2JDQ6iT+gozhnZjy/rw9G8= +go.opentelemetry.io/otel v1.11.2 h1:YBZcQlsVekzFsFbjygXMOXSs6pialIZxcjfO/mBDmR0= +go.opentelemetry.io/otel v1.11.2/go.mod h1:7p4EUV+AqgdlNV9gL97IgUZiVR3yrFXYo53f9BM3tRI= +go.opentelemetry.io/otel/sdk v1.11.2 h1:GF4JoaEx7iihdMFu30sOyRx52HDHOkl9xQ8SMqNXUiU= +go.opentelemetry.io/otel/sdk v1.11.2/go.mod h1:wZ1WxImwpq+lVRo4vsmSOxdd+xwoUJ6rqyLc3SyX9aU= +go.opentelemetry.io/otel/trace v1.11.2 h1:Xf7hWSF2Glv0DE3MH7fBHvtpSBsjcBUe5MYAmZM/+y0= +go.opentelemetry.io/otel/trace v1.11.2/go.mod h1:4N+yC7QEz7TTsG9BSRLNAa63eg5E06ObSbKPmxQ/pKA= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.opentelemetry.io/proto/otlp v0.9.0/go.mod h1:1vKfU9rv61e9EVGthD1zNvUbiwPcimSsOPU9brfSHJg= -go.opentelemetry.io/proto/otlp v0.11.0/go.mod h1:QpEjXPrNQzrFDZgoTo49dgHR9RYRSrg3NAKnUGl9YpQ= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= -go.opentelemetry.io/proto/otlp v0.16.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= -go.opentelemetry.io/proto/otlp v0.19.0/go.mod 
h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= -go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/automaxprocs v1.5.1/go.mod h1:BF4eumQw0P9GtnuxxovUd06vwm1o18oMzFtK66vU6XU= go.uber.org/goleak v1.0.0/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= -go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= -go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= @@ -2907,7 +1954,6 @@ go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.19.1 h1:ue41HOKd1vGURxrmeKIgELGb3jPW9DMUDGtsinblHwI= go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -2931,6 +1977,7 @@ golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200604202706-70a84ac30bf9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -2938,64 +1985,28 @@ golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220313003712-b769efc7c000/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220427172511-eb4f295cb31f/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= -golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= -golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= -golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod 
h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3/go.mod h1:NOZ3BPKG0ec/BKJQgnvsSFpcKLM5xXVWnvZS97DWHgE= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= -golang.org/x/exp v0.0.0-20230206171751-46f607a40771/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= -golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 h1:k/i9J1pBpvlfR+9QsetwPyERsqu1GIbi967PQMq3Ivc= -golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= -golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a h1:Jw5wfR+h9mnIYH+OtGT2im5wV1YGGDora5vTv/aa5bE= -golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20200119044424-58c23975cae1/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -3006,7 +2017,7 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint 
v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= @@ -3017,17 +2028,11 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= -golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.9.0 h1:KENHtAZL2y3NLMYZeHY9DW8HW8V+kQyJsY/V9JlKvCs= -golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -3076,51 +2081,34 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod 
h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.0.0-20221012135044-0b7e1fb9d458/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20220725212005-46097bf591d3/go.mod h1:AaygXjzTFtRAg2ttMY5RMuhpJ3cNnI0XpyFJD1iQRSM= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= golang.org/x/net v0.4.0/go.mod 
h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190130055435-99b60b757ec1/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -3128,31 +2116,10 @@ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4Iltr golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= -golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= -golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.0.0-20221006150949-b44042a4b9c1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.1.0/go.mod h1:G9FE4dLTsbXUu90h/Pf85g4w1D+SSAgR+q46nJZ8M4A= 
golang.org/x/oauth2 v0.3.0/go.mod h1:rQrIauxkUhJ6CuwEXwymO2/eh4xz2ZWF1nBkcxS+tGk= -golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec= -golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= -golang.org/x/oauth2 v0.8.0 h1:6dkIjl3j3LtZ/O3sTgZTMsLKSftL/B8Zgq4huOIIUu8= -golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE= +golang.org/x/oauth2 v0.6.0 h1:Lh8GPgSKBfWSwFvtuWOfeI3aAAnbXTSutYxJiOJFgIw= +golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -3167,11 +2134,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI= -golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -3240,14 +2204,15 @@ golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -3261,95 +2226,56 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210903071746-97244b99971b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210818153620-00dd8d7831e7/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211031064116-611d5d643895/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220405210540-1e041c57c461/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -3365,31 +2291,22 @@ golang.org/x/text v0.4.0/go.mod 
h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w= golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -3415,12 +2332,10 @@ golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -3452,52 +2367,26 @@ golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20200916195026-c9a70fc28ce3/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= -golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= -golang.org/x/tools v0.3.0/go.mod 
h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= -golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= -golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= +golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4= -golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= -gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= -gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= -gonum.org/v1/gonum v0.11.0 h1:f1IJhK4Km5tBJmaiJXtk/PkL4cdVX6J+tGiM187uT5E= -gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA= -gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= -gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= -gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY= -gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo= google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= @@ -3516,47 +2405,9 @@ google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0M google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= 
-google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= -google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= -google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= -google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= -google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= -google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= -google.golang.org/api v0.77.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= -google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= -google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= -google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= -google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g= -google.golang.org/api v0.90.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= -google.golang.org/api v0.93.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= -google.golang.org/api v0.95.0/go.mod h1:eADj+UBuxkh5zlrSntJghuNeg8HwQ1w5lTKkuqaETEI= -google.golang.org/api v0.96.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= -google.golang.org/api v0.97.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= -google.golang.org/api v0.98.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= -google.golang.org/api v0.99.0/go.mod h1:1YOf74vkVndF7pG6hIHuINsM7eWwpVTAfNMNiL91A08= -google.golang.org/api v0.100.0/go.mod h1:ZE3Z2+ZOr87Rx7dqFsdRQkRBk36kDtp/h+QpHbB7a70= -google.golang.org/api v0.102.0/go.mod h1:3VFl6/fzoA+qNuS1N1/VfXY4LjoXN/wzeIp7TweWwGo= -google.golang.org/api v0.103.0/go.mod h1:hGtW6nK1AC+d9si/UBhw8Xli+QMOf6xyNAyJw4qU9w0= -google.golang.org/api v0.106.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= -google.golang.org/api v0.107.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= -google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= -google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= -google.golang.org/api v0.124.0 h1:dP6Ef1VgOGqQ8eiv4GiY8RhmeyqzovcXBYPDUYG8Syo= -google.golang.org/api v0.124.0/go.mod h1:xu2HQurE5gi/3t1aFCvhPD781p0a3p11sdunTJ2BlP4= +google.golang.org/api v0.32.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.114.0 h1:1xQPji6cO2E2vLiI+C/XiFAnsn1WV3mjaEwGLhi3grE= +google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine 
v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -3591,7 +2442,6 @@ google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= @@ -3604,105 +2454,13 @@ google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= 
-google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= google.golang.org/genproto v0.0.0-20220329172620-7be39ac1afc7/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= 
-google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= -google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= -google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= -google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= -google.golang.org/genproto v0.0.0-20220628213854-d9e0b6570c03/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= -google.golang.org/genproto v0.0.0-20220722212130-b98a9ff5e252/go.mod h1:GkXuJDJ6aQ7lnJcRF+SJVgFdQhypqgl3LB1C9vabdRE= -google.golang.org/genproto v0.0.0-20220801145646-83ce21fca29f/go.mod h1:iHe1svFLAZg9VWz891+QbRMwUv9O/1Ww+/mngYeThbc= -google.golang.org/genproto v0.0.0-20220815135757-37a418bb8959/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= -google.golang.org/genproto v0.0.0-20220817144833-d7fd3f11b9b1/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= -google.golang.org/genproto v0.0.0-20220822174746-9e6da59bd2fc/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= -google.golang.org/genproto v0.0.0-20220829144015-23454907ede3/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= -google.golang.org/genproto v0.0.0-20220829175752-36a9c930ecbf/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk= -google.golang.org/genproto v0.0.0-20220913154956-18f8339a66a5/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= -google.golang.org/genproto v0.0.0-20220914142337-ca0e39ece12f/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= -google.golang.org/genproto v0.0.0-20220915135415-7fd63a7952de/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= -google.golang.org/genproto v0.0.0-20220916172020-2692e8806bfa/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= -google.golang.org/genproto v0.0.0-20220919141832-68c03719ef51/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo= -google.golang.org/genproto v0.0.0-20220920201722-2b89144ce006/go.mod h1:ht8XFiar2npT/g4vkk7O0WYS1sHOHbdujxbEp7CJWbw= -google.golang.org/genproto v0.0.0-20220926165614-551eb538f295/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI= -google.golang.org/genproto v0.0.0-20220926220553-6981cbe3cfce/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI= -google.golang.org/genproto v0.0.0-20221010155953-15ba04fc1c0e/go.mod h1:3526vdqwhZAwq4wsRUaVG555sVgsNmIjRtO7t/JH29U= -google.golang.org/genproto v0.0.0-20221014173430-6e2ab493f96b/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= -google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= -google.golang.org/genproto v0.0.0-20221024153911-1573dae28c9c/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= -google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= 
-google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c/go.mod h1:CGI5F/G+E5bKwmfYo09AXuVN4dD894kIKUFmVbP2/Fo= -google.golang.org/genproto v0.0.0-20221109142239-94d6d90a7d66/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221114212237-e4508ebdbee1/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221117204609-8f9c96812029/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221201204527-e3fa12d562f3/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221202195650-67e5cbc046fd/go.mod h1:cTsE614GARnxrLsqKREzmNYJACSWWpAWdNMwnD7c2BE= -google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230112194545-e10362b5ecf9/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230113154510-dbe35b8444a5/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230123190316-2c411cf9d197/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230124163310-31e0e69b6fc2/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230125152338-dcaf20b6aeaa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230127162408-596548ed4efa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230209215440-0dfe4f8abfcc/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230216225411-c8e22ba71e44/go.mod h1:8B0gmkoRebU8ukX6HP+4wrVQUY1+6PkQ44BSyIlflHA= -google.golang.org/genproto v0.0.0-20230222225845-10f96fb3dbec/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 h1:DdoeryqhaXp1LtT/emMP1BRJPHHKFi5akj/nbx/zNTA= google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= -google.golang.org/genproto v0.0.0-20230525154841-bd750badd5c6 h1:62QuyPXKEkZpjZesyj5K5jABl6MnSnWl+vNuT5oz90E= -google.golang.org/genproto v0.0.0-20230525154841-bd750badd5c6/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= @@ -3724,33 +2482,15 @@ google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc 
v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= +google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc= google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= -google.golang.org/grpc v1.55.0 h1:3Oj82/tFSCeUrRTg/5E/7d/W5A1tj6Ky1ABAuZuv5ag= -google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -3768,8 +2508,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.29.1 h1:7QBf+IK2gx70Ap/hDsOmam3GE0v9HicjfEdAxE62UoM= +google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -3777,7 +2517,6 @@ gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= @@ -3787,7 +2526,8 @@ gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKW gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/jcmturner/goidentity.v3 v3.0.0 h1:1duIyWiTaYvVx3YX2CYtpJbUFd7/UuPYCfgXtQ3VTbI= @@ -3827,9 +2567,8 @@ gotest.tools/gotestsum v1.9.0 h1:Jbo/0k/sIOXIJu51IZxEAt27n77xspFEfL6SqKUR72A= gotest.tools/gotestsum v1.9.0/go.mod h1:6JHCiN6TEjA7Kaz23q1bH0e2Dc3YJjDUZ0DmctFZf+w= gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= +gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo= gotest.tools/v3 v3.3.0/go.mod h1:Mcr9QNxkg0uMvy/YElmo4SpXgJKWgQvYrT7Kw5RzJ1A= -gotest.tools/v3 v3.4.0 h1:ZazjZUfuVeZGLAmlKKuyv3IKP5orXcwtOwDQH6YVr6o= -gotest.tools/v3 v3.4.0/go.mod h1:CtbdzLSsqVhDgMtKsx03ird5YTGB3ar27v0u/yKBW5g= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -3837,153 +2576,78 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= -honnef.co/go/tools v0.4.3 h1:o/n5/K5gXqk8Gozvs2cnL0F2S1/g1vcGCAx2vETjITw= -honnef.co/go/tools v0.4.3/go.mod h1:36ZgoUOrqOk1GxwHhyryEkq8FQWkUO2xGuSMhUCcdvA= k8s.io/api v0.18.2/go.mod h1:SJCWI7OLzhZSvbY7U8zwNl9UA4o1fizoug34OV/2r78= k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/api 
v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= -k8s.io/api v0.22.5/go.mod h1:mEhXyLaSD1qTOf40rRiKXkc+2iCem09rWLlFwhCEiAs= -k8s.io/api v0.26.2/go.mod h1:1kjMQsFE+QHPfskEcVNgL3+Hp88B80uj0QtSOlj8itU= -k8s.io/api v0.27.2 h1:+H17AJpUMvl+clT+BPnKf0E3ksMAzoBBg7CntpSuADo= -k8s.io/api v0.27.2/go.mod h1:ENmbocXfBT2ADujUXcBhHV55RIT31IIEvkntP6vZKS4= +k8s.io/api v0.25.3 h1:Q1v5UFfYe87vi5H7NU0p4RXC26PPMT8KOpr1TLQbCMQ= +k8s.io/api v0.25.3/go.mod h1:o42gKscFrEVjHdQnyRenACrMtbuJsVdP+WVjqejfzmI= k8s.io/apimachinery v0.18.2/go.mod h1:9SnR/e11v5IbyPCGbvJViimtJ0SwHG4nfZFjU77ftcA= k8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc= -k8s.io/apimachinery v0.22.1/go.mod h1:O3oNtNadZdeOMxHFVxOreoznohCpy0z6mocxbZr7oJ0= -k8s.io/apimachinery v0.22.5/go.mod h1:xziclGKwuuJ2RM5/rSFQSYAj0zdbci3DH8kj+WvyN0U= -k8s.io/apimachinery v0.25.0/go.mod h1:qMx9eAk0sZQGsXGu86fab8tZdffHbwUfsvzqKn4mfB0= -k8s.io/apimachinery v0.26.2/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= -k8s.io/apimachinery v0.27.2 h1:vBjGaKKieaIreI+oQwELalVG4d8f3YAMNpWLzDXkxeg= -k8s.io/apimachinery v0.27.2/go.mod h1:XNfZ6xklnMCOGGFNqXG7bUrQCoR04dh/E7FprV6pb+E= +k8s.io/apimachinery v0.25.3 h1:7o9ium4uyUOM76t6aunP0nZuex7gDf8VGwkR5RcJnQc= +k8s.io/apimachinery v0.25.3/go.mod h1:jaF9C/iPNM1FuLl7Zuy5b9v+n35HGSh6AQ4HYRkCqwo= k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM= k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q= -k8s.io/apiserver v0.22.5/go.mod h1:s2WbtgZAkTKt679sYtSudEQrTGWUSQAPe6MupLnlmaQ= -k8s.io/apiserver v0.26.2/go.mod h1:GHcozwXgXsPuOJ28EnQ/jXEM9QeG6HT22YxSNmpYNh8= k8s.io/client-go v0.18.2/go.mod h1:Xcm5wVGXX9HAA2JJ2sSBUn3tCJ+4SVlCbl2MNNv+CIU= k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= k8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0= -k8s.io/client-go v0.22.5/go.mod h1:cs6yf/61q2T1SdQL5Rdcjg9J1ElXSwbjSrW2vFImM4Y= -k8s.io/client-go v0.26.2/go.mod h1:u5EjOuSyBa09yqqyY7m3abZeovO/7D/WehVVlZ2qcqU= -k8s.io/client-go v0.27.2 h1:vDLSeuYvCHKeoQRhCXjxXO45nHVv2Ip4Fe0MfioMrhE= -k8s.io/client-go v0.27.2/go.mod h1:tY0gVmUsHrAmjzHX9zs7eCjxcBsf8IiNe7KQ52biTcQ= +k8s.io/client-go v0.25.3 h1:oB4Dyl8d6UbfDHD8Bv8evKylzs3BXzzufLiO27xuPs0= +k8s.io/client-go v0.25.3/go.mod h1:t39LPczAIMwycjcXkVc+CB+PZV69jQuNx4um5ORDjQA= k8s.io/code-generator v0.19.7/go.mod h1:lwEq3YnLYb/7uVXLorOJfxg+cUu2oihFhHZ0n9NIla0= k8s.io/component-base v0.20.1/go.mod h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk= k8s.io/component-base v0.20.4/go.mod h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI= k8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM= -k8s.io/component-base v0.22.5/go.mod h1:VK3I+TjuF9eaa+Ln67dKxhGar5ynVbwnGrUiNF4MqCI= -k8s.io/component-base v0.26.2/go.mod h1:DxbuIe9M3IZPRxPIzhch2m1eT7uFrSBJUBuVCQEBivs= k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM= k8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= k8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= k8s.io/cri-api v0.20.6/go.mod h1:ew44AjNXwyn1s0U4xCKGodU7J1HzBeZ1MpGrpa5r8Yc= -k8s.io/cri-api v0.23.1/go.mod 
h1:REJE3PSU0h/LOV1APBrupxrEJqnoxZC8KWzkBUHwrK4= -k8s.io/cri-api v0.25.0/go.mod h1:J1rAyQkSJ2Q6I+aBMOVgg2/cbbebso6FNa0UagiR0kc= -k8s.io/cri-api v0.25.3/go.mod h1:riC/P0yOGUf2K1735wW+CXs1aY2ctBgePtnnoFLd0dU= -k8s.io/cri-api v0.26.2/go.mod h1:Oo8O7MKFPNDxfDf2LmrF/3Hf30q1C6iliGuv3la3tIA= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20200428234225-8167cfdcfc14/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20201113003025-83324d819ded/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= -k8s.io/gengo v0.0.0-20210813121822-485abfe95c7c/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= -k8s.io/klog/v2 v2.9.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= -k8s.io/klog/v2 v2.30.0/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/klog/v2 v2.70.1 h1:7aaoSdahviPmR+XkS7FyxlkkXs6tHISSG03RxleQAVQ= k8s.io/klog/v2 v2.70.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/klog/v2 v2.80.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/klog/v2 v2.90.1 h1:m4bYOKall2MmOiRaR1J+We67Do7vm9KiQVlT96lnHUw= -k8s.io/klog/v2 v2.90.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/kms v0.26.2/go.mod h1:69qGnf1NsFOQP07fBYqNLZklqEHSJF024JqYCaeVxHg= k8s.io/kube-openapi v0.0.0-20200121204235-bf4fb3bd569c/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E= k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o= k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= -k8s.io/kube-openapi v0.0.0-20210421082810-95288971da7e/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= -k8s.io/kube-openapi v0.0.0-20211109043538-20434351676c/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= +k8s.io/kube-openapi v0.0.0-20220803162953-67bda5d908f1 h1:MQ8BAZPZlWk3S9K4a9NCkIFQtZShWqoha7snGixVgEA= k8s.io/kube-openapi v0.0.0-20220803162953-67bda5d908f1/go.mod h1:C/N6wCaBHeBHkHUesQOQy2/MZqGgMAFPqGsGQLdbZBU= -k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280/go.mod h1:+Axhij7bCpeqhklhUTe3xmOn6bWxolyZEeyaFpjGtl4= -k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f h1:2kWPakN3i/k81b0gvD5C5FJ2kxm1WrQFanWchyKuqGg= -k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f/go.mod h1:byini6yhqGC14c3ebc/QwanvYwhuMWF6yz2F8uwW8eg= k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -k8s.io/utils v0.0.0-20210819203725-bdf08cb9a70a/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b/go.mod 
h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed h1:jAne/RjBTyawwAy0utX5eqigAwz/lQhTmy+Hr/Cpue4= k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -k8s.io/utils v0.0.0-20221107191617-1a15be271d1d/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 h1:kmDqav+P+/5e1i9tFfHq1qcF3sOrDp+YEkVDAHu7Jwk= -k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= layeh.com/radius v0.0.0-20190322222518-890bc1058917 h1:BDXFaFzUt5EIqe/4wrTc4AcYZWP6iC6Ult+jQWLh5eU= layeh.com/radius v0.0.0-20190322222518-890bc1058917/go.mod h1:fywZKyu//X7iRzaxLgPWsvc0L26IUpVvE/aeIL2JtIQ= -lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= -lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= -modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/cc/v3 v3.36.2/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/cc/v3 v3.36.3/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc= -modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw= -modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= -modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= -modernc.org/ccgo/v3 v3.16.8/go.mod h1:zNjwkizS+fIFDrDjIAgBSCLkWbJuHF+ar3QRn+Z9aws= -modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJDo= -modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= -modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= -modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= -modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A= -modernc.org/libc v1.16.1/go.mod h1:JjJE0eu4yeK7tab2n4S1w8tlWd9MxXLRzheaRnAKymU= -modernc.org/libc v1.16.17/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= -modernc.org/libc v1.16.19/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= -modernc.org/libc v1.17.0/go.mod h1:XsgLldpP4aWlPlsjqKRdHPqCxCjISdHfM/yeWC5GyW0= -modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s= -modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= -modernc.org/memory v1.2.0/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= -modernc.org/memory v1.2.1/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4= -modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= -modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= -modernc.org/tcl v1.13.1/go.mod h1:XOLfOwzhkljL4itZkK6T72ckMgvj0BDsnKNdZVUOecw= -modernc.org/token v1.0.0/go.mod 
h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= -modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8= mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= mvdan.cc/gofumpt v0.3.1 h1:avhhrOmv0IuvQVK7fvwV91oFSGAk5/6Po8GXTzICeu8= mvdan.cc/gofumpt v0.3.1/go.mod h1:w3ymliuxvzVx8DAutBnVyDqYb1Niy/yCJt/lk821YCE= nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= -oras.land/oras-go v1.2.0/go.mod h1:pFNs7oHp2dYsYMSS82HaX5l4mpnGO7hbpPN6EWH2ltc= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.22/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.35/go.mod h1:WxjusMwXlKzfAs4p9km6XJRndVt2FROgMVCE4cdohFo= +sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k= sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= -sigs.k8s.io/structured-merge-diff/v4 v4.1.2/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= -sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= -sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/helper/benchhelpers/benchhelpers.go b/helper/benchhelpers/benchhelpers.go index 06dcde604e6182..9c0feac15c83de 100644 --- a/helper/benchhelpers/benchhelpers.go +++ b/helper/benchhelpers/benchhelpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package benchhelpers import ( diff --git a/helper/builtinplugins/registry.go b/helper/builtinplugins/registry.go index b4d3da5937f017..8a24a58c34ade6 100644 --- a/helper/builtinplugins/registry.go +++ b/helper/builtinplugins/registry.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package builtinplugins import ( diff --git a/helper/builtinplugins/registry_test.go b/helper/builtinplugins/registry_test.go index cfaec51814477b..47c6654f179818 100644 --- a/helper/builtinplugins/registry_test.go +++ b/helper/builtinplugins/registry_test.go @@ -1,21 +1,12 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package builtinplugins import ( - "bufio" - "fmt" - "os" "reflect" - "regexp" "testing" credUserpass "github.com/hashicorp/vault/builtin/credential/userpass" dbMysql "github.com/hashicorp/vault/plugins/database/mysql" "github.com/hashicorp/vault/sdk/helper/consts" - - "golang.org/x/exp/slices" ) // Test_RegistryGet exercises the (registry).Get functionality by comparing @@ -227,108 +218,3 @@ func Test_RegistryStatus(t *testing.T) { }) } } - -// Test_RegistryMatchesGenOpenapi ensures that the plugins mounted in gen_openapi.sh match registry.go -func Test_RegistryMatchesGenOpenapi(t *testing.T) { - const scriptPath = "../../scripts/gen_openapi.sh" - - // parseScript fetches the contents of gen_openapi.sh script & extract the relevant lines - parseScript := func(path string) ([]string, []string, error) { - f, err := os.Open(scriptPath) - if err != nil { - return nil, nil, fmt.Errorf("could not open gen_openapi.sh script: %w", err) - } - defer f.Close() - - var ( - credentialBackends []string - credentialBackendsRe = regexp.MustCompile(`^vault auth enable (?:"([a-zA-Z]+)"|([a-zA-Z]+))$`) - - secretsBackends []string - secretsBackendsRe = regexp.MustCompile(`^vault secrets enable (?:"([a-zA-Z]+)"|([a-zA-Z]+))$`) - ) - - scanner := bufio.NewScanner(f) - - for scanner.Scan() { - line := scanner.Text() - - if m := credentialBackendsRe.FindStringSubmatch(line); m != nil { - credentialBackends = append(credentialBackends, m[1]) - } - if m := secretsBackendsRe.FindStringSubmatch(line); m != nil { - secretsBackends = append(secretsBackends, m[1]) - } - } - - if err := scanner.Err(); err != nil { - return nil, nil, fmt.Errorf("error scanning gen_openapi.sh: %v", err) - } - - return credentialBackends, secretsBackends, nil - } - - // ensureInRegistry ensures that the given plugin is in registry and marked as "supported" - ensureInRegistry := func(t *testing.T, name string, pluginType consts.PluginType) { - t.Helper() - - // "database" will not be present in registry, it is represented as - // a list of database plugins instead - if name == "database" && pluginType == consts.PluginTypeSecrets { - return - } - - deprecationStatus, ok := Registry.DeprecationStatus(name, pluginType) - if !ok { - t.Fatalf("%q %s backend is missing from registry.go; please remove it from gen_openapi.sh", name, pluginType) - } - - if deprecationStatus == consts.Removed { - t.Fatalf("%q %s backend is marked 'removed' in registry.go; please remove it from gen_openapi.sh", name, pluginType) - } - } - - // ensureInScript ensures that the given plugin name in in gen_openapi.sh script - ensureInScript := func(t *testing.T, scriptBackends []string, name string) { - t.Helper() - - for _, excluded := range []string{ - "oidc", // alias for "jwt" - "openldap", // alias for "ldap" - } { - if name == excluded { - return - } - } - - if 
!slices.Contains(scriptBackends, name) { - t.Fatalf("%q backend could not be found in gen_openapi.sh, please add it there", name) - } - } - - // test starts here - scriptCredentialBackends, scriptSecretsBackends, err := parseScript(scriptPath) - if err != nil { - t.Fatal(err) - } - - for _, name := range scriptCredentialBackends { - ensureInRegistry(t, name, consts.PluginTypeCredential) - } - - for _, name := range scriptSecretsBackends { - ensureInRegistry(t, name, consts.PluginTypeSecrets) - } - - for name, backend := range Registry.credentialBackends { - if backend.DeprecationStatus == consts.Supported { - ensureInScript(t, scriptCredentialBackends, name) - } - } - - for name, backend := range Registry.logicalBackends { - if backend.DeprecationStatus == consts.Supported { - ensureInScript(t, scriptSecretsBackends, name) - } - } -} diff --git a/helper/constants/constants_oss.go b/helper/constants/constants_oss.go index 8675f7030658a2..22b9928fd3c411 100644 --- a/helper/constants/constants_oss.go +++ b/helper/constants/constants_oss.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package constants diff --git a/helper/constants/fips.go b/helper/constants/fips.go index 9632d014a5496c..2a9f7ee7aae6b3 100644 --- a/helper/constants/fips.go +++ b/helper/constants/fips.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !fips package constants diff --git a/helper/constants/fips_build_check.go b/helper/constants/fips_build_check.go index 10e07e583f98e0..1e865b499f62e3 100644 --- a/helper/constants/fips_build_check.go +++ b/helper/constants/fips_build_check.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build (!fips && (fips_140_2 || fips_140_3)) || (fips && !fips_140_2 && !fips_140_3) || (fips_140_2 && fips_140_3) package constants diff --git a/helper/constants/fips_cgo_check.go b/helper/constants/fips_cgo_check.go index 6de7d9f0d0315d..56eabb6c81e5fd 100644 --- a/helper/constants/fips_cgo_check.go +++ b/helper/constants/fips_cgo_check.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build (fips || fips_140_2 || fips_140_3) && !cgo package constants diff --git a/helper/dhutil/dhutil.go b/helper/dhutil/dhutil.go index 97552d4cc4c690..a0ddde25bd43a1 100644 --- a/helper/dhutil/dhutil.go +++ b/helper/dhutil/dhutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dhutil import ( diff --git a/helper/dhutil/dhutil_test.go b/helper/dhutil/dhutil_test.go index 4b94f601d92d63..46e90196d15e77 100644 --- a/helper/dhutil/dhutil_test.go +++ b/helper/dhutil/dhutil_test.go @@ -1,4 +1 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dhutil diff --git a/helper/experiments/experiments.go b/helper/experiments/experiments.go index 538430e64cccf4..eb522def19127a 100644 --- a/helper/experiments/experiments.go +++ b/helper/experiments/experiments.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package experiments const VaultExperimentEventsAlpha1 = "events.alpha1" diff --git a/helper/fairshare/fairshare_testing_util.go b/helper/fairshare/fairshare_testing_util.go index 8061795947d829..1f65acd94d903c 100644 --- a/helper/fairshare/fairshare_testing_util.go +++ b/helper/fairshare/fairshare_testing_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package fairshare import ( diff --git a/helper/fairshare/jobmanager.go b/helper/fairshare/jobmanager.go index dc9a6198af2569..75c7662fc7656c 100644 --- a/helper/fairshare/jobmanager.go +++ b/helper/fairshare/jobmanager.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package fairshare import ( diff --git a/helper/fairshare/jobmanager_test.go b/helper/fairshare/jobmanager_test.go index 3d6638a4a766f1..d90314782294e0 100644 --- a/helper/fairshare/jobmanager_test.go +++ b/helper/fairshare/jobmanager_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package fairshare import ( diff --git a/helper/fairshare/workerpool.go b/helper/fairshare/workerpool.go index e655a9084dd6c8..f5179ba4e408cc 100644 --- a/helper/fairshare/workerpool.go +++ b/helper/fairshare/workerpool.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package fairshare import ( diff --git a/helper/fairshare/workerpool_test.go b/helper/fairshare/workerpool_test.go index eb563140374b26..a3c3f68a1e3fed 100644 --- a/helper/fairshare/workerpool_test.go +++ b/helper/fairshare/workerpool_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package fairshare import ( diff --git a/helper/flag-kv/flag.go b/helper/flag-kv/flag.go index a3b04cec11ae62..06ae27111a8a13 100644 --- a/helper/flag-kv/flag.go +++ b/helper/flag-kv/flag.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kvFlag import ( diff --git a/helper/flag-kv/flag_test.go b/helper/flag-kv/flag_test.go index b083d52e74e35b..2fc88aa5f3ed7c 100644 --- a/helper/flag-kv/flag_test.go +++ b/helper/flag-kv/flag_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kvFlag import ( diff --git a/helper/flag-slice/flag.go b/helper/flag-slice/flag.go index b8234385ef03c5..da75149dc488e0 100644 --- a/helper/flag-slice/flag.go +++ b/helper/flag-slice/flag.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package sliceflag import "strings" diff --git a/helper/flag-slice/flag_test.go b/helper/flag-slice/flag_test.go index 7973d57926a21b..f72e1d9605d662 100644 --- a/helper/flag-slice/flag_test.go +++ b/helper/flag-slice/flag_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package sliceflag import ( diff --git a/helper/forwarding/types.pb.go b/helper/forwarding/types.pb.go index bf579d0382c2c3..4d4e0aec1d0c90 100644 --- a/helper/forwarding/types.pb.go +++ b/helper/forwarding/types.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/helper/forwarding/types.proto b/helper/forwarding/types.proto index 7624257919dea9..8f1376a1800446 100644 --- a/helper/forwarding/types.proto +++ b/helper/forwarding/types.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/helper/forwarding"; diff --git a/helper/forwarding/util.go b/helper/forwarding/util.go index 83612354322211..de92639afbee1d 100644 --- a/helper/forwarding/util.go +++ b/helper/forwarding/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package forwarding import ( diff --git a/helper/forwarding/util_test.go b/helper/forwarding/util_test.go index 192646a15f4d2d..0af2b89e989b27 100644 --- a/helper/forwarding/util_test.go +++ b/helper/forwarding/util_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package forwarding import ( diff --git a/helper/hostutil/hostinfo.go b/helper/hostutil/hostinfo.go index 25c11e0b0753bc..d35afb57d900a8 100644 --- a/helper/hostutil/hostinfo.go +++ b/helper/hostutil/hostinfo.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !openbsd package hostutil diff --git a/helper/hostutil/hostinfo_error.go b/helper/hostutil/hostinfo_error.go index afbec28fa262d9..ca5d8a2941c030 100644 --- a/helper/hostutil/hostinfo_error.go +++ b/helper/hostutil/hostinfo_error.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package hostutil import "fmt" diff --git a/helper/hostutil/hostinfo_openbsd.go b/helper/hostutil/hostinfo_openbsd.go index dbe1655e90dc81..8f01458afe1ae1 100644 --- a/helper/hostutil/hostinfo_openbsd.go +++ b/helper/hostutil/hostinfo_openbsd.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build openbsd package hostutil diff --git a/helper/hostutil/hostinfo_test.go b/helper/hostutil/hostinfo_test.go index 0f53744adc1e33..c54893b17dbb90 100644 --- a/helper/hostutil/hostinfo_test.go +++ b/helper/hostutil/hostinfo_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package hostutil import ( diff --git a/helper/identity/identity.go b/helper/identity/identity.go index a7769f08e0186e..9a28be71567917 100644 --- a/helper/identity/identity.go +++ b/helper/identity/identity.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package identity import ( diff --git a/helper/identity/mfa/mfa.go b/helper/identity/mfa/mfa.go index 1f8af4f4c20b29..d4bbf10b484632 100644 --- a/helper/identity/mfa/mfa.go +++ b/helper/identity/mfa/mfa.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mfa import ( diff --git a/helper/identity/mfa/sentinel.go b/helper/identity/mfa/sentinel.go index a587aa70b699c6..f6d8c7b994224a 100644 --- a/helper/identity/mfa/sentinel.go +++ b/helper/identity/mfa/sentinel.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mfa func (c *Config) SentinelGet(key string) (interface{}, error) { diff --git a/helper/identity/mfa/types.pb.go b/helper/identity/mfa/types.pb.go index 57dbab0da60c80..7431f564818238 100644 --- a/helper/identity/mfa/types.pb.go +++ b/helper/identity/mfa/types.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/helper/identity/mfa/types.proto b/helper/identity/mfa/types.proto index 65eb853be24618..decade25b9af7c 100644 --- a/helper/identity/mfa/types.proto +++ b/helper/identity/mfa/types.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/helper/identity/mfa"; diff --git a/helper/identity/sentinel.go b/helper/identity/sentinel.go index 4f65e62c848def..2c2bc4b940f25c 100644 --- a/helper/identity/sentinel.go +++ b/helper/identity/sentinel.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package identity import "github.com/golang/protobuf/ptypes" diff --git a/helper/identity/types.pb.go b/helper/identity/types.pb.go index 91b4c0ff20d78e..0f32206842ad82 100644 --- a/helper/identity/types.pb.go +++ b/helper/identity/types.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/helper/identity/types.proto b/helper/identity/types.proto index a34d715acf3401..0ea7525eb0f5b5 100644 --- a/helper/identity/types.proto +++ b/helper/identity/types.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/helper/identity"; diff --git a/helper/locking/lock.go b/helper/locking/lock.go index a9bff4c0f06c7d..8043f01ad61743 100644 --- a/helper/locking/lock.go +++ b/helper/locking/lock.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package locking import ( diff --git a/helper/logging/logfile.go b/helper/logging/logfile.go index 9417e9ca8233d7..93d84a8ddd0867 100644 --- a/helper/logging/logfile.go +++ b/helper/logging/logfile.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logging import ( diff --git a/helper/logging/logfile_test.go b/helper/logging/logfile_test.go index a0cae986aadcba..86153f17e258e2 100644 --- a/helper/logging/logfile_test.go +++ b/helper/logging/logfile_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logging import ( diff --git a/helper/logging/logger.go b/helper/logging/logger.go index 1efac27bedfd2c..e876d54f15575a 100644 --- a/helper/logging/logger.go +++ b/helper/logging/logger.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logging import ( diff --git a/helper/logging/logger_test.go b/helper/logging/logger_test.go index 30ff1783a776fb..efeabc5d565888 100644 --- a/helper/logging/logger_test.go +++ b/helper/logging/logger_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logging import ( diff --git a/helper/metricsutil/bucket.go b/helper/metricsutil/bucket.go index 0f602e22ad1536..9cbb2cdc2dafdc 100644 --- a/helper/metricsutil/bucket.go +++ b/helper/metricsutil/bucket.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( diff --git a/helper/metricsutil/bucket_test.go b/helper/metricsutil/bucket_test.go index 1179e4dbdcef7b..f37584781627f0 100644 --- a/helper/metricsutil/bucket_test.go +++ b/helper/metricsutil/bucket_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( diff --git a/helper/metricsutil/gauge_process.go b/helper/metricsutil/gauge_process.go index c6fcd56639a2ad..0ad0e9d876cf96 100644 --- a/helper/metricsutil/gauge_process.go +++ b/helper/metricsutil/gauge_process.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( @@ -11,9 +8,24 @@ import ( "github.com/armon/go-metrics" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/helper/timeutil" ) +// This interface allows unit tests to substitute in a simulated clock. +type clock interface { + Now() time.Time + NewTicker(time.Duration) *time.Ticker +} + +type defaultClock struct{} + +func (_ defaultClock) Now() time.Time { + return time.Now() +} + +func (_ defaultClock) NewTicker(d time.Duration) *time.Ticker { + return time.NewTicker(d) +} + // GaugeLabelValues is one gauge in a set sharing a single key, that // are measured in a batch. type GaugeLabelValues struct { @@ -61,7 +73,7 @@ type GaugeCollectionProcess struct { maxGaugeCardinality int // time source - clock timeutil.Clock + clock clock } // NewGaugeCollectionProcess creates a new collection process for the callback @@ -86,7 +98,7 @@ func NewGaugeCollectionProcess( gaugeInterval, maxGaugeCardinality, logger, - timeutil.DefaultClock{}, + defaultClock{}, ) } @@ -109,7 +121,7 @@ func (m *ClusterMetricSink) NewGaugeCollectionProcess( m.GaugeInterval, m.MaxGaugeCardinality, logger, - timeutil.DefaultClock{}, + defaultClock{}, ) } @@ -122,7 +134,7 @@ func newGaugeCollectionProcessWithClock( gaugeInterval time.Duration, maxGaugeCardinality int, logger log.Logger, - clock timeutil.Clock, + clock clock, ) (*GaugeCollectionProcess, error) { process := &GaugeCollectionProcess{ stop: make(chan struct{}, 1), diff --git a/helper/metricsutil/gauge_process_test.go b/helper/metricsutil/gauge_process_test.go index efd74e707df802..9971714e04e307 100644 --- a/helper/metricsutil/gauge_process_test.go +++ b/helper/metricsutil/gauge_process_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( @@ -15,7 +12,6 @@ import ( "github.com/armon/go-metrics" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/helper/timeutil" ) // SimulatedTime maintains a virtual clock so the test isn't @@ -25,10 +21,9 @@ import ( type SimulatedTime struct { now time.Time tickerBarrier chan *SimulatedTicker - timeutil.DefaultClock } -var _ timeutil.Clock = &SimulatedTime{} +var _ clock = &SimulatedTime{} type SimulatedTicker struct { ticker *time.Ticker @@ -123,7 +118,7 @@ func TestGauge_Creation(t *testing.T) { t.Fatalf("Error creating collection process: %v", err) } - if _, ok := p.clock.(timeutil.DefaultClock); !ok { + if _, ok := p.clock.(defaultClock); !ok { t.Error("Default clock not installed.") } diff --git a/helper/metricsutil/metricsutil.go b/helper/metricsutil/metricsutil.go index cfc2e1109655f8..de85c7e4628e96 100644 --- a/helper/metricsutil/metricsutil.go +++ b/helper/metricsutil/metricsutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( diff --git a/helper/metricsutil/metricsutil_test.go b/helper/metricsutil/metricsutil_test.go index f8f17fedb3611f..1b817ddad192d9 100644 --- a/helper/metricsutil/metricsutil_test.go +++ b/helper/metricsutil/metricsutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( diff --git a/helper/metricsutil/wrapped_metrics.go b/helper/metricsutil/wrapped_metrics.go index e3df058e116590..67deb3bee1cd17 100644 --- a/helper/metricsutil/wrapped_metrics.go +++ b/helper/metricsutil/wrapped_metrics.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( diff --git a/helper/metricsutil/wrapped_metrics_test.go b/helper/metricsutil/wrapped_metrics_test.go index b65809b46109a3..c0fb2c386677dd 100644 --- a/helper/metricsutil/wrapped_metrics_test.go +++ b/helper/metricsutil/wrapped_metrics_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package metricsutil import ( diff --git a/helper/monitor/monitor.go b/helper/monitor/monitor.go index 28ecf0eee57109..490e2fa08bbafa 100644 --- a/helper/monitor/monitor.go +++ b/helper/monitor/monitor.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package monitor import ( diff --git a/helper/monitor/monitor_test.go b/helper/monitor/monitor_test.go index 06e1e01777827d..0133a351b99d50 100644 --- a/helper/monitor/monitor_test.go +++ b/helper/monitor/monitor_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package monitor import ( diff --git a/helper/namespace/namespace.go b/helper/namespace/namespace.go index 04a5b79dbec818..c1226a5547f9ae 100644 --- a/helper/namespace/namespace.go +++ b/helper/namespace/namespace.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package namespace import ( diff --git a/helper/namespace/namespace_test.go b/helper/namespace/namespace_test.go index fd4c4c2f9988c2..442b46b904470e 100644 --- a/helper/namespace/namespace_test.go +++ b/helper/namespace/namespace_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package namespace import ( diff --git a/helper/osutil/fileinfo.go b/helper/osutil/fileinfo.go index f14db6b9c8bf15..59b99859ebae4a 100644 --- a/helper/osutil/fileinfo.go +++ b/helper/osutil/fileinfo.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package osutil import ( diff --git a/helper/osutil/fileinfo_test.go b/helper/osutil/fileinfo_test.go index 8c3316bc91abdd..febd11966a35b3 100644 --- a/helper/osutil/fileinfo_test.go +++ b/helper/osutil/fileinfo_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package osutil import ( diff --git a/helper/osutil/fileinfo_unix.go b/helper/osutil/fileinfo_unix.go index bb60c498797dee..c49a591ce9c663 100644 --- a/helper/osutil/fileinfo_unix.go +++ b/helper/osutil/fileinfo_unix.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !windows package osutil diff --git a/helper/osutil/fileinfo_unix_test.go b/helper/osutil/fileinfo_unix_test.go index 302bd9e16847bb..c31ca5bdc37227 100644 --- a/helper/osutil/fileinfo_unix_test.go +++ b/helper/osutil/fileinfo_unix_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !windows package osutil diff --git a/helper/osutil/fileinfo_windows.go b/helper/osutil/fileinfo_windows.go index 193fe3ff8420c0..0869c97e7d98a9 100644 --- a/helper/osutil/fileinfo_windows.go +++ b/helper/osutil/fileinfo_windows.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build windows package osutil diff --git a/helper/parseip/parseip.go b/helper/parseip/parseip.go index 95579633b50926..414a3f05c81bec 100644 --- a/helper/parseip/parseip.go +++ b/helper/parseip/parseip.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package parseip import ( diff --git a/helper/parseip/parseip_test.go b/helper/parseip/parseip_test.go index e26c810be677f8..5d2b3645b853d4 100644 --- a/helper/parseip/parseip_test.go +++ b/helper/parseip/parseip_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package parseip import ( diff --git a/helper/pgpkeys/encrypt_decrypt.go b/helper/pgpkeys/encrypt_decrypt.go index c7a8027cd2ce8f..554013d6af494c 100644 --- a/helper/pgpkeys/encrypt_decrypt.go +++ b/helper/pgpkeys/encrypt_decrypt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pgpkeys import ( diff --git a/helper/pgpkeys/flag.go b/helper/pgpkeys/flag.go index 79d114b4d9cc16..e107bc99433858 100644 --- a/helper/pgpkeys/flag.go +++ b/helper/pgpkeys/flag.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pgpkeys import ( diff --git a/helper/pgpkeys/flag_test.go b/helper/pgpkeys/flag_test.go index 9ea25d44d5898f..ec6402d5eb2f2b 100644 --- a/helper/pgpkeys/flag_test.go +++ b/helper/pgpkeys/flag_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pgpkeys import ( diff --git a/helper/pgpkeys/keybase.go b/helper/pgpkeys/keybase.go index b24e4bf231c8aa..b2571b451d9b77 100644 --- a/helper/pgpkeys/keybase.go +++ b/helper/pgpkeys/keybase.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pgpkeys import ( diff --git a/helper/pgpkeys/keybase_test.go b/helper/pgpkeys/keybase_test.go index 7d59899fd9e538..3faa3f5d8db2ee 100644 --- a/helper/pgpkeys/keybase_test.go +++ b/helper/pgpkeys/keybase_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pgpkeys import ( diff --git a/helper/pgpkeys/test_keys.go b/helper/pgpkeys/test_keys.go index be97698d1216ed..c10a9055ed00e9 100644 --- a/helper/pgpkeys/test_keys.go +++ b/helper/pgpkeys/test_keys.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pgpkeys const ( diff --git a/helper/policies/policies.go b/helper/policies/policies.go index 2a34602057980d..729ce10b2fc65e 100644 --- a/helper/policies/policies.go +++ b/helper/policies/policies.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package policies import "sort" diff --git a/helper/policies/policies_test.go b/helper/policies/policies_test.go index 6356dee18a0437..ba9b0a8f70b05f 100644 --- a/helper/policies/policies_test.go +++ b/helper/policies/policies_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package policies import "testing" diff --git a/helper/proxyutil/proxyutil.go b/helper/proxyutil/proxyutil.go index b0f06d6b9f260a..fdb20973e003f0 100644 --- a/helper/proxyutil/proxyutil.go +++ b/helper/proxyutil/proxyutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package proxyutil import ( diff --git a/helper/random/parser.go b/helper/random/parser.go index c5e82c8c1e5a12..3184db8aa5c624 100644 --- a/helper/random/parser.go +++ b/helper/random/parser.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/parser_test.go b/helper/random/parser_test.go index f8af5a5eb920bf..59cdb814304383 100644 --- a/helper/random/parser_test.go +++ b/helper/random/parser_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/random_api.go b/helper/random/random_api.go index 5bb9316b15be50..9bc89f9af7e54f 100644 --- a/helper/random/random_api.go +++ b/helper/random/random_api.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/registry.go b/helper/random/registry.go index 334df734e24ef1..efdcf5c302f584 100644 --- a/helper/random/registry.go +++ b/helper/random/registry.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/registry_test.go b/helper/random/registry_test.go index 21297aaf21f591..3d7060650b4fd4 100644 --- a/helper/random/registry_test.go +++ b/helper/random/registry_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/rules.go b/helper/random/rules.go index 05cc800c91e886..fead5b4ffe65d6 100644 --- a/helper/random/rules.go +++ b/helper/random/rules.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/rules_test.go b/helper/random/rules_test.go index e85df503b0a13c..18aa0087982656 100644 --- a/helper/random/rules_test.go +++ b/helper/random/rules_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/serializing.go b/helper/random/serializing.go index 5b68d327587400..93371df0287843 100644 --- a/helper/random/serializing.go +++ b/helper/random/serializing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/serializing_test.go b/helper/random/serializing_test.go index bfa17ae266e701..171053742993e8 100644 --- a/helper/random/serializing_test.go +++ b/helper/random/serializing_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/string_generator.go b/helper/random/string_generator.go index 48f08ed8924afe..e4ff66cd3930e6 100644 --- a/helper/random/string_generator.go +++ b/helper/random/string_generator.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/random/string_generator_test.go b/helper/random/string_generator_test.go index 8307ff73a485cb..af4e7da149626d 100644 --- a/helper/random/string_generator_test.go +++ b/helper/random/string_generator_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package random import ( diff --git a/helper/storagepacker/storagepacker.go b/helper/storagepacker/storagepacker.go index 2e69f3a27d7a34..4633d523bea536 100644 --- a/helper/storagepacker/storagepacker.go +++ b/helper/storagepacker/storagepacker.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package storagepacker import ( diff --git a/helper/storagepacker/storagepacker_test.go b/helper/storagepacker/storagepacker_test.go index ad76107afbf42f..cc2448b2bc90ae 100644 --- a/helper/storagepacker/storagepacker_test.go +++ b/helper/storagepacker/storagepacker_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package storagepacker import ( diff --git a/helper/storagepacker/types.pb.go b/helper/storagepacker/types.pb.go index 6dd58b96d37d69..0afd42f6035162 100644 --- a/helper/storagepacker/types.pb.go +++ b/helper/storagepacker/types.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/helper/storagepacker/types.proto b/helper/storagepacker/types.proto index 7efb0a11ef9837..4edfaf4f857219 100644 --- a/helper/storagepacker/types.proto +++ b/helper/storagepacker/types.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/helper/storagepacker"; diff --git a/helper/testhelpers/azurite/azurite.go b/helper/testhelpers/azurite/azurite.go index a538e5c87eb032..13d65750d4cecd 100644 --- a/helper/testhelpers/azurite/azurite.go +++ b/helper/testhelpers/azurite/azurite.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package azurite import ( @@ -10,7 +7,7 @@ import ( "testing" "github.com/Azure/azure-storage-blob-go/azblob" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) type Config struct { diff --git a/helper/testhelpers/cassandra/cassandrahelper.go b/helper/testhelpers/cassandra/cassandrahelper.go index 8e970e836a9ccf..899136f16c9a49 100644 --- a/helper/testhelpers/cassandra/cassandrahelper.go +++ b/helper/testhelpers/cassandra/cassandrahelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( @@ -9,13 +6,11 @@ import ( "net" "os" "path/filepath" - "runtime" - "strings" "testing" "time" "github.com/gocql/gocql" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) type containerConfig struct { @@ -82,12 +77,6 @@ func (h Host) ConnectionURL() string { func PrepareTestContainer(t *testing.T, opts ...ContainerOpt) (Host, func()) { t.Helper() - - // Skipping on ARM, as this image can't run on ARM architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - if os.Getenv("CASSANDRA_HOSTS") != "" { host, port, err := net.SplitHostPort(os.Getenv("CASSANDRA_HOSTS")) if err != nil { diff --git a/helper/testhelpers/certhelpers/cert_helpers.go b/helper/testhelpers/certhelpers/cert_helpers.go index 42692d01f6a6ca..b84bbf961e5a26 100644 --- a/helper/testhelpers/certhelpers/cert_helpers.go +++ b/helper/testhelpers/certhelpers/cert_helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package certhelpers import ( diff --git a/helper/testhelpers/consul/consulhelper.go b/helper/testhelpers/consul/consulhelper.go index e8c160b8f6aa59..7d7984b17495ca 100644 --- a/helper/testhelpers/consul/consulhelper.go +++ b/helper/testhelpers/consul/consulhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package consul import ( @@ -11,7 +8,7 @@ import ( consulapi "github.com/hashicorp/consul/api" goversion "github.com/hashicorp/go-version" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) type Config struct { diff --git a/helper/testhelpers/corehelpers/corehelpers.go b/helper/testhelpers/corehelpers/corehelpers.go index 846db21da17a9a..db94ecc94a2f92 100644 --- a/helper/testhelpers/corehelpers/corehelpers.go +++ b/helper/testhelpers/corehelpers/corehelpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Package corehelpers contains testhelpers that don't depend on package vault, // and thus can be used within vault (as well as elsewhere.) package corehelpers diff --git a/sdk/helper/docker/testhelpers.go b/helper/testhelpers/docker/testhelpers.go similarity index 65% rename from sdk/helper/docker/testhelpers.go rename to helper/testhelpers/docker/testhelpers.go index 7902750d6dc529..51b35cb1631985 100644 --- a/sdk/helper/docker/testhelpers.go +++ b/helper/testhelpers/docker/testhelpers.go @@ -1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package docker import ( "archive/tar" - "bufio" "bytes" "context" "encoding/base64" @@ -17,14 +13,11 @@ import ( "os" "strconv" "strings" - "sync" "time" "github.com/cenkalti/backoff/v3" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" - "github.com/docker/docker/api/types/filters" - "github.com/docker/docker/api/types/mount" "github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/strslice" "github.com/docker/docker/client" @@ -34,62 +27,33 @@ import ( "github.com/hashicorp/go-uuid" ) -const DockerAPIVersion = "1.40" - type Runner struct { DockerAPI *client.Client RunOptions RunOptions } type RunOptions struct { - ImageRepo string - ImageTag string - ContainerName string - Cmd []string - Entrypoint []string - Env []string - NetworkName string - NetworkID string - CopyFromTo map[string]string - Ports []string - DoNotAutoRemove bool - AuthUsername string - AuthPassword string - OmitLogTimestamps bool - LogConsumer func(string) - Capabilities []string - PreDelete bool - PostStart func(string, string) error - LogStderr io.Writer - LogStdout io.Writer - VolumeNameToMountPoint map[string]string -} - -func NewDockerAPI() (*client.Client, error) { - return client.NewClientWithOpts(client.FromEnv, client.WithVersion(DockerAPIVersion)) + ImageRepo string + ImageTag string + ContainerName string + Cmd []string + Entrypoint []string + Env []string + NetworkID string + CopyFromTo map[string]string + Ports []string + DoNotAutoRemove bool + AuthUsername string + AuthPassword string + LogConsumer func(string) } func NewServiceRunner(opts RunOptions) (*Runner, error) { - dapi, err := NewDockerAPI() + dapi, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.39")) if err != nil { return nil, err } - if opts.NetworkName == "" { - opts.NetworkName = os.Getenv("TEST_DOCKER_NETWORK_NAME") - } - if opts.NetworkName != "" { - nets, err := dapi.NetworkList(context.TODO(), types.NetworkListOptions{ - Filters: filters.NewArgs(filters.Arg("name", opts.NetworkName)), - }) - if err != nil { - return nil, err - } - if len(nets) != 1 { - return nil, fmt.Errorf("expected exactly one docker network named %q, got %d", opts.NetworkName, len(nets)) - } - opts.NetworkID = nets[0].ID - } if opts.NetworkID == "" { opts.NetworkID = 
os.Getenv("TEST_DOCKER_NETWORK_ID") } @@ -181,112 +145,38 @@ func (d *Runner) StartService(ctx context.Context, connect ServiceAdapter) (*Ser return serv, err } -type LogConsumerWriter struct { - consumer func(string) -} - -func (l LogConsumerWriter) Write(p []byte) (n int, err error) { - // TODO this assumes that we're never passed partial log lines, which - // seems a safe assumption for now based on how docker looks to implement - // logging, but might change in the future. - scanner := bufio.NewScanner(bytes.NewReader(p)) - scanner.Buffer(make([]byte, 64*1024), bufio.MaxScanTokenSize) - for scanner.Scan() { - l.consumer(scanner.Text()) - } - return len(p), nil -} - -var _ io.Writer = &LogConsumerWriter{} - // StartNewService will start the runner's configured docker container but with the // ability to control adding a name suffix or forcing a local address to be returned. // 'addSuffix' will add a random UUID to the end of the container name. // 'forceLocalAddr' will force the container address returned to be in the // form of '127.0.0.1:1234' where 1234 is the mapped container port. func (d *Runner) StartNewService(ctx context.Context, addSuffix, forceLocalAddr bool, connect ServiceAdapter) (*Service, string, error) { - if d.RunOptions.PreDelete { - name := d.RunOptions.ContainerName - matches, err := d.DockerAPI.ContainerList(ctx, types.ContainerListOptions{ - All: true, - // TODO use labels to ensure we don't delete anything we shouldn't - Filters: filters.NewArgs( - filters.Arg("name", name), - ), - }) - if err != nil { - return nil, "", fmt.Errorf("failed to list containers named %q", name) - } - for _, cont := range matches { - err = d.DockerAPI.ContainerRemove(ctx, cont.ID, types.ContainerRemoveOptions{Force: true}) - if err != nil { - return nil, "", fmt.Errorf("failed to pre-delete container named %q", name) - } - } - } - result, err := d.Start(context.Background(), addSuffix, forceLocalAddr) + container, hostIPs, containerID, err := d.Start(context.Background(), addSuffix, forceLocalAddr) if err != nil { return nil, "", err } - var wg sync.WaitGroup - consumeLogs := false - var logStdout, logStderr io.Writer - if d.RunOptions.LogStdout != nil && d.RunOptions.LogStderr != nil { - consumeLogs = true - logStdout = d.RunOptions.LogStdout - logStderr = d.RunOptions.LogStderr - } else if d.RunOptions.LogConsumer != nil { - consumeLogs = true - logStdout = &LogConsumerWriter{d.RunOptions.LogConsumer} - logStderr = &LogConsumerWriter{d.RunOptions.LogConsumer} - } - - // The waitgroup wg is used here to support some stuff in NewDockerCluster. - // We can't generate the PKI cert for the https listener until we know the - // container's address, meaning we must first start the container, then - // generate the cert, then copy it into the container, then signal Vault - // to reload its config/certs. However, if we SIGHUP Vault before Vault - // has installed its signal handler, that will kill Vault, since the default - // behaviour for HUP is termination. So the PostStart that NewDockerCluster - // passes in (which does all that PKI cert stuff) waits to see output from - // Vault on stdout/stderr before it sends the signal, and we don't want to - // run the PostStart until we've hooked into the docker logs. - if consumeLogs { - wg.Add(1) - go func() { - // We must run inside a goroutine because we're using Follow:true, - // and StdCopy will block until the log stream is closed. 
- stream, err := d.DockerAPI.ContainerLogs(context.Background(), result.Container.ID, types.ContainerLogsOptions{ + cleanup := func() { + if d.RunOptions.LogConsumer != nil { + rc, err := d.DockerAPI.ContainerLogs(ctx, container.ID, types.ContainerLogsOptions{ ShowStdout: true, ShowStderr: true, - Timestamps: !d.RunOptions.OmitLogTimestamps, + Timestamps: true, Details: true, - Follow: true, }) - wg.Done() - if err != nil { - d.RunOptions.LogConsumer(fmt.Sprintf("error reading container logs: %v", err)) - } else { - _, err := stdcopy.StdCopy(logStdout, logStderr, stream) + if err == nil { + b, err := ioutil.ReadAll(rc) if err != nil { - d.RunOptions.LogConsumer(fmt.Sprintf("error demultiplexing docker logs: %v", err)) + d.RunOptions.LogConsumer(fmt.Sprintf("error reading container logs, err=%v, read: %s", err, string(b))) + } else { + d.RunOptions.LogConsumer(string(b)) } } - }() - } - wg.Wait() - - if d.RunOptions.PostStart != nil { - if err := d.RunOptions.PostStart(result.Container.ID, result.RealIP); err != nil { - return nil, "", fmt.Errorf("poststart failed: %w", err) } - } - cleanup := func() { for i := 0; i < 10; i++ { - err := d.DockerAPI.ContainerRemove(ctx, result.Container.ID, types.ContainerRemoveOptions{Force: true}) - if err == nil || client.IsErrNotFound(err) { + err := d.DockerAPI.ContainerRemove(ctx, container.ID, types.ContainerRemoveOptions{Force: true}) + if err == nil { return } time.Sleep(1 * time.Second) @@ -297,7 +187,7 @@ func (d *Runner) StartNewService(ctx context.Context, addSuffix, forceLocalAddr bo.MaxInterval = time.Second * 5 bo.MaxElapsedTime = 2 * time.Minute - pieces := strings.Split(result.Addrs[0], ":") + pieces := strings.Split(hostIPs[0], ":") portInt, err := strconv.Atoi(pieces[1]) if err != nil { return nil, "", err @@ -305,11 +195,6 @@ func (d *Runner) StartNewService(ctx context.Context, addSuffix, forceLocalAddr var config ServiceConfig err = backoff.Retry(func() error { - container, err := d.DockerAPI.ContainerInspect(ctx, result.Container.ID) - if err != nil || !container.State.Running { - return backoff.Permanent(fmt.Errorf("failed inspect or container %q not running: %w", result.Container.ID, err)) - } - c, err := connect(ctx, pieces[0], portInt) if err != nil { return err @@ -329,32 +214,24 @@ func (d *Runner) StartNewService(ctx context.Context, addSuffix, forceLocalAddr } return &Service{ - Config: config, - Cleanup: cleanup, - Container: result.Container, - StartResult: result, - }, result.Container.ID, nil + Config: config, + Cleanup: cleanup, + Container: container, + }, containerID, nil } type Service struct { - Config ServiceConfig - Cleanup func() - Container *types.ContainerJSON - StartResult *StartResult -} - -type StartResult struct { + Config ServiceConfig + Cleanup func() Container *types.ContainerJSON - Addrs []string - RealIP string } -func (d *Runner) Start(ctx context.Context, addSuffix, forceLocalAddr bool) (*StartResult, error) { +func (d *Runner) Start(ctx context.Context, addSuffix, forceLocalAddr bool) (*types.ContainerJSON, []string, string, error) { name := d.RunOptions.ContainerName if addSuffix { suffix, err := uuid.GenerateUUID() if err != nil { - return nil, err + return nil, nil, "", err } name += "-" + suffix } @@ -379,9 +256,6 @@ func (d *Runner) Start(ctx context.Context, addSuffix, forceLocalAddr bool) (*St AutoRemove: !d.RunOptions.DoNotAutoRemove, PublishAllPorts: true, } - if len(d.RunOptions.Capabilities) > 0 { - hostConfig.CapAdd = d.RunOptions.Capabilities - } netConfig := &network.NetworkingConfig{} 
if d.RunOptions.NetworkID != "" { @@ -399,7 +273,7 @@ func (d *Runner) Start(ctx context.Context, addSuffix, forceLocalAddr bool) (*St "password": d.RunOptions.AuthPassword, } if err := json.NewEncoder(&buf).Encode(auth); err != nil { - return nil, err + return nil, nil, "", err } opts.RegistryAuth = base64.URLEncoding.EncodeToString(buf.Bytes()) } @@ -408,85 +282,48 @@ func (d *Runner) Start(ctx context.Context, addSuffix, forceLocalAddr bool) (*St _, _ = ioutil.ReadAll(resp) } - for vol, mtpt := range d.RunOptions.VolumeNameToMountPoint { - hostConfig.Mounts = append(hostConfig.Mounts, mount.Mount{ - Type: "volume", - Source: vol, - Target: mtpt, - ReadOnly: false, - }) - } - c, err := d.DockerAPI.ContainerCreate(ctx, cfg, hostConfig, netConfig, nil, cfg.Hostname) if err != nil { - return nil, fmt.Errorf("container create failed: %v", err) + return nil, nil, "", fmt.Errorf("container create failed: %v", err) } for from, to := range d.RunOptions.CopyFromTo { if err := copyToContainer(ctx, d.DockerAPI, c.ID, from, to); err != nil { _ = d.DockerAPI.ContainerRemove(ctx, c.ID, types.ContainerRemoveOptions{}) - return nil, err + return nil, nil, "", err } } err = d.DockerAPI.ContainerStart(ctx, c.ID, types.ContainerStartOptions{}) if err != nil { _ = d.DockerAPI.ContainerRemove(ctx, c.ID, types.ContainerRemoveOptions{}) - return nil, fmt.Errorf("container start failed: %v", err) + return nil, nil, "", fmt.Errorf("container start failed: %v", err) } inspect, err := d.DockerAPI.ContainerInspect(ctx, c.ID) if err != nil { _ = d.DockerAPI.ContainerRemove(ctx, c.ID, types.ContainerRemoveOptions{}) - return nil, err + return nil, nil, "", err } var addrs []string for _, port := range d.RunOptions.Ports { pieces := strings.Split(port, "/") if len(pieces) < 2 { - return nil, fmt.Errorf("expected port of the form 1234/tcp, got: %s", port) + return nil, nil, "", fmt.Errorf("expected port of the form 1234/tcp, got: %s", port) } if d.RunOptions.NetworkID != "" && !forceLocalAddr { addrs = append(addrs, fmt.Sprintf("%s:%s", cfg.Hostname, pieces[0])) } else { mapped, ok := inspect.NetworkSettings.Ports[nat.Port(port)] if !ok || len(mapped) == 0 { - return nil, fmt.Errorf("no port mapping found for %s", port) + return nil, nil, "", fmt.Errorf("no port mapping found for %s", port) } addrs = append(addrs, fmt.Sprintf("127.0.0.1:%s", mapped[0].HostPort)) } } - var realIP string - if d.RunOptions.NetworkID == "" { - if len(inspect.NetworkSettings.Networks) > 1 { - return nil, fmt.Errorf("Set d.RunOptions.NetworkName instead for container with multiple networks: %v", inspect.NetworkSettings.Networks) - } - for _, network := range inspect.NetworkSettings.Networks { - realIP = network.IPAddress - break - } - } else { - realIP = inspect.NetworkSettings.Networks[d.RunOptions.NetworkName].IPAddress - } - - return &StartResult{ - Container: &inspect, - Addrs: addrs, - RealIP: realIP, - }, nil -} - -func (d *Runner) RefreshFiles(ctx context.Context, containerID string) error { - for from, to := range d.RunOptions.CopyFromTo { - if err := copyToContainer(ctx, d.DockerAPI, containerID, from, to); err != nil { - // TODO too drastic? 
- _ = d.DockerAPI.ContainerRemove(ctx, containerID, types.ContainerRemoveOptions{}) - return err - } - } - return d.DockerAPI.ContainerKill(ctx, containerID, "SIGHUP") + return &inspect, addrs, c.ID, nil } func (d *Runner) Stop(ctx context.Context, containerID string) error { @@ -496,12 +333,8 @@ func (d *Runner) Stop(ctx context.Context, containerID string) error { } } - // timeout in seconds - timeout := 5 - options := container.StopOptions{ - Timeout: &timeout, - } - if err := d.DockerAPI.ContainerStop(ctx, containerID, options); err != nil { + timeout := 5 * time.Second + if err := d.DockerAPI.ContainerStop(ctx, containerID, &timeout); err != nil { return fmt.Errorf("error stopping container: %v", err) } @@ -561,10 +394,6 @@ func (u RunCmdUser) Apply(cfg *types.ExecConfig) error { } func (d *Runner) RunCmdWithOutput(ctx context.Context, container string, cmd []string, opts ...RunCmdOpt) ([]byte, []byte, int, error) { - return RunCmdWithOutput(d.DockerAPI, ctx, container, cmd, opts...) -} - -func RunCmdWithOutput(api *client.Client, ctx context.Context, container string, cmd []string, opts ...RunCmdOpt) ([]byte, []byte, int, error) { runCfg := types.ExecConfig{ AttachStdout: true, AttachStderr: true, @@ -577,12 +406,12 @@ func RunCmdWithOutput(api *client.Client, ctx context.Context, container string, } } - ret, err := api.ContainerExecCreate(ctx, container, runCfg) + ret, err := d.DockerAPI.ContainerExecCreate(ctx, container, runCfg) if err != nil { return nil, nil, -1, fmt.Errorf("error creating execution environment: %v\ncfg: %v\n", err, runCfg) } - resp, err := api.ContainerExecAttach(ctx, ret.ID, types.ExecStartCheck{}) + resp, err := d.DockerAPI.ContainerExecAttach(ctx, ret.ID, types.ExecStartCheck{}) if err != nil { return nil, nil, -1, fmt.Errorf("error attaching to command execution: %v\ncfg: %v\nret: %v\n", err, runCfg, ret) } @@ -598,7 +427,7 @@ func RunCmdWithOutput(api *client.Client, ctx context.Context, container string, stderr := stderrB.Bytes() // Fetch return code. - info, err := api.ContainerExecInspect(ctx, ret.ID) + info, err := d.DockerAPI.ContainerExecInspect(ctx, ret.ID) if err != nil { return stdout, stderr, -1, fmt.Errorf("error reading command exit code: %v", err) } @@ -607,10 +436,6 @@ func RunCmdWithOutput(api *client.Client, ctx context.Context, container string, } func (d *Runner) RunCmdInBackground(ctx context.Context, container string, cmd []string, opts ...RunCmdOpt) (string, error) { - return RunCmdInBackground(d.DockerAPI, ctx, container, cmd, opts...) 
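// Illustrative sketch, not part of the patch: with this change, Start returns the
// inspected container, its mapped addresses, and the container ID directly (the
// StartResult wrapper is gone), and Stop/RunCmdWithOutput are methods on *Runner
// again. A caller in a test might look roughly like the following; the RunOptions
// values are placeholders, and real usage also configures the image to run.
func TestWithThrowawayContainer(t *testing.T) {
	ctx := context.Background()

	runner, err := docker.NewServiceRunner(docker.RunOptions{ // helper/testhelpers/docker
		ContainerName: "example",
		Ports:         []string{"8080/tcp"},
	})
	if err != nil {
		t.Fatal(err)
	}

	// addSuffix=true gives the container a unique name; forceLocalAddr=false keeps
	// network-scoped addresses when a NetworkID is set.
	inspect, addrs, containerID, err := runner.Start(ctx, true, false)
	if err != nil {
		t.Fatal(err)
	}
	defer runner.Stop(ctx, containerID)
	_ = inspect

	t.Logf("container %s listening on %v", containerID, addrs)

	// Exec a command inside the container and check its exit code.
	stdout, stderr, exitCode, err := runner.RunCmdWithOutput(ctx, containerID, []string{"ls", "-l", "/"})
	if err != nil || exitCode != 0 {
		t.Fatalf("exec failed: err=%v exit=%d stderr=%s", err, exitCode, stderr)
	}
	_ = stdout
}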
-} - -func RunCmdInBackground(api *client.Client, ctx context.Context, container string, cmd []string, opts ...RunCmdOpt) (string, error) { runCfg := types.ExecConfig{ AttachStdout: true, AttachStderr: true, @@ -623,12 +448,12 @@ func RunCmdInBackground(api *client.Client, ctx context.Context, container strin } } - ret, err := api.ContainerExecCreate(ctx, container, runCfg) + ret, err := d.DockerAPI.ContainerExecCreate(ctx, container, runCfg) if err != nil { return "", fmt.Errorf("error creating execution environment: %w\ncfg: %v\n", err, runCfg) } - err = api.ContainerExecStart(ctx, ret.ID, types.ExecStartCheck{}) + err = d.DockerAPI.ContainerExecStart(ctx, ret.ID, types.ExecStartCheck{}) if err != nil { return "", fmt.Errorf("error starting command execution: %w\ncfg: %v\nret: %v\n", err, runCfg, ret) } @@ -640,8 +465,6 @@ func RunCmdInBackground(api *client.Client, ctx context.Context, container strin type PathContents interface { UpdateHeader(header *tar.Header) error Get() ([]byte, error) - SetMode(mode int64) - SetOwners(uid int, gid int) } type FileContents struct { @@ -662,17 +485,8 @@ func (b FileContents) Get() ([]byte, error) { return b.Data, nil } -func (b *FileContents) SetMode(mode int64) { - b.Mode = mode -} - -func (b *FileContents) SetOwners(uid int, gid int) { - b.UID = uid - b.GID = gid -} - func PathContentsFromBytes(data []byte) PathContents { - return &FileContents{ + return FileContents{ Data: data, Mode: 0o644, } @@ -712,7 +526,7 @@ func BuildContextFromTarball(reader io.Reader) (BuildContext, error) { return nil, fmt.Errorf("unexpectedly short read on tarball: %v of %v", read, header.Size) } - bCtx[header.Name] = &FileContents{ + bCtx[header.Name] = FileContents{ Data: data, Mode: header.Mode, UID: header.Uid, @@ -729,14 +543,8 @@ func (bCtx *BuildContext) ToTarball() (io.Reader, error) { tarBuilder := tar.NewWriter(buffer) defer tarBuilder.Close() - now := time.Now() for filepath, contents := range *bCtx { - fileHeader := &tar.Header{ - Name: filepath, - ModTime: now, - AccessTime: now, - ChangeTime: now, - } + fileHeader := &tar.Header{Name: filepath} if contents == nil && !strings.HasSuffix(filepath, "/") { return nil, fmt.Errorf("expected file path (%v) to have trailing / due to nil contents, indicating directory", filepath) } @@ -821,10 +629,6 @@ func (u BuildTags) Apply(cfg *types.ImageBuildOptions) error { const containerfilePath = "_containerfile" func (d *Runner) BuildImage(ctx context.Context, containerfile string, containerContext BuildContext, opts ...BuildOpt) ([]byte, error) { - return BuildImage(ctx, d.DockerAPI, containerfile, containerContext, opts...) -} - -func BuildImage(ctx context.Context, api *client.Client, containerfile string, containerContext BuildContext, opts ...BuildOpt) ([]byte, error) { var cfg types.ImageBuildOptions // Build container context tarball, provisioning containerfile in. @@ -842,7 +646,7 @@ func BuildImage(ctx context.Context, api *client.Client, containerfile string, c } } - resp, err := api.ImageBuild(ctx, tar, cfg) + resp, err := d.DockerAPI.ImageBuild(ctx, tar, cfg) if err != nil { return nil, fmt.Errorf("failed to build image: %v", err) } diff --git a/helper/testhelpers/etcd/etcdhelper.go b/helper/testhelpers/etcd/etcdhelper.go index a5b2578439c98e..1051dd1405a442 100644 --- a/helper/testhelpers/etcd/etcdhelper.go +++ b/helper/testhelpers/etcd/etcdhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package etcd import ( @@ -11,7 +8,7 @@ import ( "testing" "time" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" clientv3 "go.etcd.io/etcd/client/v3" ) diff --git a/helper/testhelpers/fakegcsserver/fake-gcs-server.go b/helper/testhelpers/fakegcsserver/fake-gcs-server.go index 503824e99e2dfb..ed83970d17af69 100644 --- a/helper/testhelpers/fakegcsserver/fake-gcs-server.go +++ b/helper/testhelpers/fakegcsserver/fake-gcs-server.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package fakegcsserver import ( @@ -12,7 +9,7 @@ import ( "testing" "cloud.google.com/go/storage" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "google.golang.org/api/iterator" "google.golang.org/api/option" ) diff --git a/helper/testhelpers/ldap/ldaphelper.go b/helper/testhelpers/ldap/ldaphelper.go index 7eebc134d58153..79587ec6d9d19f 100644 --- a/helper/testhelpers/ldap/ldaphelper.go +++ b/helper/testhelpers/ldap/ldaphelper.go @@ -1,26 +1,16 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldap import ( "context" "fmt" - "runtime" - "strings" "testing" hclog "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/sdk/helper/ldaputil" ) func PrepareTestContainer(t *testing.T, version string) (cleanup func(), cfg *ldaputil.ConfigEntry) { - // Skipping on ARM, as this image can't run on ARM architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - runner, err := docker.NewServiceRunner(docker.RunOptions{ // Currently set to "michelvocks" until https://github.com/rroemhild/docker-test-openldap/pull/14 // has been merged. diff --git a/helper/testhelpers/logical/testing.go b/helper/testhelpers/logical/testing.go index f634be2f39b84e..79be91f7a6f184 100644 --- a/helper/testhelpers/logical/testing.go +++ b/helper/testhelpers/logical/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package testing import ( diff --git a/helper/testhelpers/logical/testing_test.go b/helper/testhelpers/logical/testing_test.go index 9f2d74b72f10b4..5a4096bfc74b57 100644 --- a/helper/testhelpers/logical/testing_test.go +++ b/helper/testhelpers/logical/testing_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package testing import ( diff --git a/helper/testhelpers/minimal/minimal.go b/helper/testhelpers/minimal/minimal.go deleted file mode 100644 index a13ddf0c014110..00000000000000 --- a/helper/testhelpers/minimal/minimal.go +++ /dev/null @@ -1,81 +0,0 @@ -package minimal - -import ( - "github.com/hashicorp/go-hclog" - logicalKv "github.com/hashicorp/vault-plugin-secrets-kv" - "github.com/hashicorp/vault/audit" - auditFile "github.com/hashicorp/vault/builtin/audit/file" - auditSocket "github.com/hashicorp/vault/builtin/audit/socket" - auditSyslog "github.com/hashicorp/vault/builtin/audit/syslog" - logicalDb "github.com/hashicorp/vault/builtin/logical/database" - "github.com/hashicorp/vault/builtin/plugin" - "github.com/hashicorp/vault/helper/builtinplugins" - "github.com/hashicorp/vault/http" - "github.com/hashicorp/vault/sdk/helper/logging" - "github.com/hashicorp/vault/sdk/logical" - "github.com/hashicorp/vault/sdk/physical/inmem" - "github.com/hashicorp/vault/vault" - "github.com/mitchellh/copystructure" - "github.com/mitchellh/go-testing-interface" -) - -// NewTestSoloCluster is a simpler version of NewTestCluster that only creates -// single-node clusters. It is intentionally minimalist, if you need something -// from vault.TestClusterOptions, use NewTestCluster instead. It should work fine -// with a nil config argument. There is no need to call Start or Cleanup or -// TestWaitActive on the resulting cluster. -func NewTestSoloCluster(t testing.T, config *vault.CoreConfig) *vault.TestCluster { - logger := logging.NewVaultLogger(hclog.Trace).Named(t.Name()) - - mycfg := &vault.CoreConfig{} - - if config != nil { - // It's rude to modify an input argument as a side-effect - copy, err := copystructure.Copy(config) - if err != nil { - t.Fatal(err) - } - mycfg = copy.(*vault.CoreConfig) - } - if mycfg.Physical == nil { - // Don't use NewTransactionalInmem because that would enable replication, - // which we don't care about in our case (use NewTestCluster for that.) - inm, err := inmem.NewInmem(nil, logger) - if err != nil { - t.Fatal(err) - } - mycfg.Physical = inm - } - if mycfg.CredentialBackends == nil { - mycfg.CredentialBackends = map[string]logical.Factory{ - "plugin": plugin.Factory, - } - } - if mycfg.LogicalBackends == nil { - mycfg.LogicalBackends = map[string]logical.Factory{ - "plugin": plugin.Factory, - "database": logicalDb.Factory, - // This is also available in the plugin catalog, but is here due to the need to - // automatically mount it. - "kv": logicalKv.Factory, - } - } - if mycfg.AuditBackends == nil { - mycfg.AuditBackends = map[string]audit.Factory{ - "file": auditFile.Factory, - "socket": auditSocket.Factory, - "syslog": auditSyslog.Factory, - } - } - if mycfg.BuiltinRegistry == nil { - mycfg.BuiltinRegistry = builtinplugins.Registry - } - - cluster := vault.NewTestCluster(t, mycfg, &vault.TestClusterOptions{ - NumCores: 1, - HandlerFunc: http.Handler, - Logger: logger, - }) - t.Cleanup(cluster.Cleanup) - return cluster -} diff --git a/helper/testhelpers/minio/miniohelper.go b/helper/testhelpers/minio/miniohelper.go index 5550a12106a364..2969ce21546a18 100644 --- a/helper/testhelpers/minio/miniohelper.go +++ b/helper/testhelpers/minio/miniohelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
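// Illustrative sketch, not part of the patch: with helper/testhelpers/minimal
// removed above, tests that want a single-node cluster can call
// vault.NewTestCluster directly, mirroring what the deleted helper did. It uses
// github.com/mitchellh/go-testing-interface's testing.T, hclog, the vault http
// handler, and sdk/helper/logging, exactly as in the removed code.
func newSoloCluster(t testing.T) *vault.TestCluster {
	logger := logging.NewVaultLogger(hclog.Trace).Named(t.Name())

	cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{
		NumCores:    1,            // single node only
		HandlerFunc: http.Handler, // github.com/hashicorp/vault/http
		Logger:      logger,
	})
	t.Cleanup(cluster.Cleanup)
	return cluster
}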
-// SPDX-License-Identifier: MPL-2.0 - package minio import ( @@ -14,7 +11,7 @@ import ( "github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) type Config struct { diff --git a/helper/testhelpers/mongodb/mongodbhelper.go b/helper/testhelpers/mongodb/mongodbhelper.go index 1f7afe30c3139d..c4288a4b551350 100644 --- a/helper/testhelpers/mongodb/mongodbhelper.go +++ b/helper/testhelpers/mongodb/mongodbhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mongodb import ( @@ -10,7 +7,7 @@ import ( "testing" "time" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/mongo/readpref" diff --git a/helper/testhelpers/mssql/mssqlhelper.go b/helper/testhelpers/mssql/mssqlhelper.go index 908e0277cc2d5f..01bfd54b53872c 100644 --- a/helper/testhelpers/mssql/mssqlhelper.go +++ b/helper/testhelpers/mssql/mssqlhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mssqlhelper import ( @@ -9,11 +6,9 @@ import ( "fmt" "net/url" "os" - "runtime" - "strings" "testing" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) const mssqlPassword = "yourStrong(!)Password" @@ -24,10 +19,6 @@ const mssqlPassword = "yourStrong(!)Password" const numRetries = 3 func PrepareMSSQLTestContainer(t *testing.T) (cleanup func(), retURL string) { - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - if os.Getenv("MSSQL_URL") != "" { return func() {}, os.Getenv("MSSQL_URL") } diff --git a/helper/testhelpers/mysql/mysqlhelper.go b/helper/testhelpers/mysql/mysqlhelper.go index 19887a93e2ce90..82b47f07e1e7d7 100644 --- a/helper/testhelpers/mysql/mysqlhelper.go +++ b/helper/testhelpers/mysql/mysqlhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysqlhelper import ( @@ -8,11 +5,10 @@ import ( "database/sql" "fmt" "os" - "runtime" "strings" "testing" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) type Config struct { @@ -27,12 +23,6 @@ func PrepareTestContainer(t *testing.T, legacy bool, pw string) (func(), string) return func() {}, os.Getenv("MYSQL_URL") } - // ARM64 is only supported on MySQL 8.0 and above. If we update - // our image and support to 8.0, we can unskip these tests. - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as MySQL 5.7 is not supported on ARM architectures") - } - imageVersion := "5.7" if legacy { imageVersion = "5.6" diff --git a/helper/testhelpers/pluginhelpers/pluginhelpers.go b/helper/testhelpers/pluginhelpers/pluginhelpers.go index e9a0067044927f..4ce0d935720cfa 100644 --- a/helper/testhelpers/pluginhelpers/pluginhelpers.go +++ b/helper/testhelpers/pluginhelpers/pluginhelpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Package pluginhelpers contains testhelpers that don't depend on package // vault, and thus can be used within vault (as well as elsewhere.) 
package pluginhelpers @@ -74,17 +71,13 @@ func CompilePlugin(t testing.T, typ consts.PluginType, pluginVersion string, plu dir := "" var err error - pluginRootDir := "builtin" - if typ == consts.PluginTypeDatabase { - pluginRootDir = "plugins" - } for { dir, err = os.Getwd() if err != nil { t.Fatal(err) } // detect if we are in a subdirectory or the root directory and compensate - if _, err := os.Stat(pluginRootDir); os.IsNotExist(err) { + if _, err := os.Stat("builtin"); os.IsNotExist(err) { err := os.Chdir("..") if err != nil { t.Fatal(err) diff --git a/helper/testhelpers/postgresql/postgresqlhelper.go b/helper/testhelpers/postgresql/postgresqlhelper.go index 4f90177cc2cbda..17b2151abb52ee 100644 --- a/helper/testhelpers/postgresql/postgresqlhelper.go +++ b/helper/testhelpers/postgresql/postgresqlhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package postgresql import ( @@ -11,7 +8,7 @@ import ( "os" "testing" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" ) func PrepareTestContainer(t *testing.T, version string) (func(), string) { @@ -25,26 +22,6 @@ func PrepareTestContainer(t *testing.T, version string) (func(), string) { return cleanup, url } -// PrepareTestContainerWithVaultUser will setup a test container with a Vault -// admin user configured so that we can safely call rotate-root without -// rotating the root DB credentials -func PrepareTestContainerWithVaultUser(t *testing.T, ctx context.Context, version string) (func(), string) { - env := []string{ - "POSTGRES_PASSWORD=secret", - "POSTGRES_DB=database", - } - - runner, cleanup, url, id := prepareTestContainer(t, "postgres", "docker.mirror.hashicorp.services/postgres", version, "secret", true, false, false, env) - - cmd := []string{"psql", "-U", "postgres", "-c", "CREATE USER vaultadmin WITH LOGIN PASSWORD 'vaultpass' SUPERUSER"} - _, err := runner.RunCmdInBackground(ctx, id, cmd) - if err != nil { - t.Fatalf("Could not run command (%v) in container: %v", cmd, err) - } - - return cleanup, url -} - func PrepareTestContainerWithPassword(t *testing.T, version, password string) (func(), string) { env := []string{ "POSTGRES_PASSWORD=" + password, diff --git a/helper/testhelpers/seal/sealhelper.go b/helper/testhelpers/seal/sealhelper.go index b05401f1cdbf60..4087f6fc0d9260 100644 --- a/helper/testhelpers/seal/sealhelper.go +++ b/helper/testhelpers/seal/sealhelper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package sealhelper import ( @@ -74,5 +71,7 @@ func (tss *TransitSealServer) MakeSeal(t testing.T, key string) (vault.Seal, err t.Fatalf("error setting wrapper config: %v", err) } - return vault.NewAutoSeal(seal.NewAccess(transitSeal)) + return vault.NewAutoSeal(&seal.Access{ + Wrapper: transitSeal, + }) } diff --git a/helper/testhelpers/testhelpers.go b/helper/testhelpers/testhelpers.go index 6ae51602bfdd8d..55a6e9d9b33a0b 100644 --- a/helper/testhelpers/testhelpers.go +++ b/helper/testhelpers/testhelpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package testhelpers import ( @@ -1047,9 +1044,3 @@ func WaitForNodesExcludingSelectedStandbys(t testing.T, cluster *vault.TestClust } } } - -// IsLocalOrRegressionTests returns true when the tests are running locally (not in CI), or when -// the regression test env var (VAULT_REGRESSION_TESTS) is provided. 
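// Illustrative sketch, not part of the patch: typical use of the postgres test
// helper shown above, based on its visible signature
// PrepareTestContainer(t *testing.T, version string) (func(), string). The
// version "13" is an arbitrary example value.
func TestWithPostgres(t *testing.T) {
	cleanup, connURL := postgresql.PrepareTestContainer(t, "13")
	defer cleanup()

	// connURL addresses the throwaway Postgres container; hand it to the code
	// under test.
	_ = connURL
}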
-func IsLocalOrRegressionTests() bool { - return os.Getenv("CI") == "" || os.Getenv("VAULT_REGRESSION_TESTS") == "true" -} diff --git a/helper/testhelpers/testhelpers_oss.go b/helper/testhelpers/testhelpers_oss.go index fc55e9b52c7368..912d50fdec3b0b 100644 --- a/helper/testhelpers/testhelpers_oss.go +++ b/helper/testhelpers/testhelpers_oss.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package testhelpers diff --git a/helper/testhelpers/teststorage/consul/consul.go b/helper/testhelpers/teststorage/consul/consul.go index bfea5ddbb4aeb9..47ec99f294b76e 100644 --- a/helper/testhelpers/teststorage/consul/consul.go +++ b/helper/testhelpers/teststorage/consul/consul.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/helper/testhelpers/teststorage/teststorage.go b/helper/testhelpers/teststorage/teststorage.go index f065e187d2c284..7a2f220ed7e827 100644 --- a/helper/testhelpers/teststorage/teststorage.go +++ b/helper/testhelpers/teststorage/teststorage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package teststorage import ( diff --git a/helper/testhelpers/teststorage/teststorage_reusable.go b/helper/testhelpers/teststorage/teststorage_reusable.go index ff9fd2b55668bd..257a5a0184c5fb 100644 --- a/helper/testhelpers/teststorage/teststorage_reusable.go +++ b/helper/testhelpers/teststorage/teststorage_reusable.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package teststorage import ( diff --git a/helper/timeutil/timeutil.go b/helper/timeutil/timeutil.go index 16f8343513e900..a65d3cf908bcbf 100644 --- a/helper/timeutil/timeutil.go +++ b/helper/timeutil/timeutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package timeutil import ( @@ -142,26 +139,3 @@ func SkipAtEndOfMonth(t *testing.T) { t.Skip("too close to end of month") } } - -// This interface allows unit tests to substitute in a simulated Clock. -type Clock interface { - Now() time.Time - NewTicker(time.Duration) *time.Ticker - NewTimer(time.Duration) *time.Timer -} - -type DefaultClock struct{} - -var _ Clock = (*DefaultClock)(nil) - -func (_ DefaultClock) Now() time.Time { - return time.Now() -} - -func (_ DefaultClock) NewTicker(d time.Duration) *time.Ticker { - return time.NewTicker(d) -} - -func (_ DefaultClock) NewTimer(d time.Duration) *time.Timer { - return time.NewTimer(d) -} diff --git a/helper/timeutil/timeutil_test.go b/helper/timeutil/timeutil_test.go index b9ccdbd5ba2262..5cef2d2061fe97 100644 --- a/helper/timeutil/timeutil_test.go +++ b/helper/timeutil/timeutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package timeutil import ( diff --git a/helper/useragent/useragent.go b/helper/useragent/useragent.go index a16b8716e352eb..b2fa40a30c8eed 100644 --- a/helper/useragent/useragent.go +++ b/helper/useragent/useragent.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package useragent import ( @@ -32,79 +29,3 @@ func String() string { return fmt.Sprintf("Vault/%s (+%s; %s)", versionFunc(), projectURL, rt) } - -// AgentString returns the consistent user-agent string for Vault Agent. -// -// e.g. 
Vault Agent/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func AgentString() string { - return fmt.Sprintf("Vault Agent/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} - -// AgentTemplatingString returns the consistent user-agent string for Vault Agent Templating. -// -// e.g. Vault Agent Templating/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func AgentTemplatingString() string { - return fmt.Sprintf("Vault Agent Templating/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} - -// AgentProxyString returns the consistent user-agent string for Vault Agent API Proxying. -// -// e.g. Vault Agent API Proxy/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func AgentProxyString() string { - return fmt.Sprintf("Vault Agent API Proxy/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} - -// AgentProxyStringWithProxiedUserAgent returns the consistent user-agent -// string for Vault Agent API Proxying, keeping the User-Agent of the proxied -// client as an extension to this UserAgent -// -// e.g. Vault Agent API Proxy/0.10.4 (+https://www.vaultproject.io/; go1.10.1); proxiedUserAgent -func AgentProxyStringWithProxiedUserAgent(proxiedUserAgent string) string { - return fmt.Sprintf("Vault Agent API Proxy/%s (+%s; %s); %s", - versionFunc(), projectURL, rt, proxiedUserAgent) -} - -// AgentAutoAuthString returns the consistent user-agent string for Vault Agent Auto-Auth. -// -// e.g. Vault Agent Auto-Auth/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func AgentAutoAuthString() string { - return fmt.Sprintf("Vault Agent Auto-Auth/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} - -// ProxyString returns the consistent user-agent string for Vault Proxy. -// -// e.g. Vault Proxy/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func ProxyString() string { - return fmt.Sprintf("Vault Proxy/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} - -// ProxyAPIProxyString returns the consistent user-agent string for Vault Proxy API Proxying. -// -// e.g. Vault Proxy API Proxy/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func ProxyAPIProxyString() string { - return fmt.Sprintf("Vault Proxy API Proxy/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} - -// ProxyStringWithProxiedUserAgent returns the consistent user-agent -// string for Vault Proxy API Proxying, keeping the User-Agent of the proxied -// client as an extension to this UserAgent -// -// e.g. Vault Proxy API Proxy/0.10.4 (+https://www.vaultproject.io/; go1.10.1); proxiedUserAgent -func ProxyStringWithProxiedUserAgent(proxiedUserAgent string) string { - return fmt.Sprintf("Vault Proxy API Proxy/%s (+%s; %s); %s", - versionFunc(), projectURL, rt, proxiedUserAgent) -} - -// ProxyAutoAuthString returns the consistent user-agent string for Vault Agent Auto-Auth. -// -// e.g. Vault Proxy Auto-Auth/0.10.4 (+https://www.vaultproject.io/; go1.10.1) -func ProxyAutoAuthString() string { - return fmt.Sprintf("Vault Proxy Auto-Auth/%s (+%s; %s)", - versionFunc(), projectURL, rt) -} diff --git a/helper/useragent/useragent_test.go b/helper/useragent/useragent_test.go index af5e2f0622745c..cb0cf32942c9f7 100644 --- a/helper/useragent/useragent_test.go +++ b/helper/useragent/useragent_test.go @@ -1,12 +1,7 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package useragent import ( "testing" - - "github.com/stretchr/testify/require" ) func TestUserAgent(t *testing.T) { @@ -17,124 +12,7 @@ func TestUserAgent(t *testing.T) { act := String() exp := "Vault/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultAgent tests the AgentString() function works -// as expected -func TestUserAgent_VaultAgent(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := AgentString() - - exp := "Vault Agent/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultAgentTemplating tests the AgentTemplatingString() function works -// as expected -func TestUserAgent_VaultAgentTemplating(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := AgentTemplatingString() - - exp := "Vault Agent Templating/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultAgentProxy tests the AgentProxyString() function works -// as expected -func TestUserAgent_VaultAgentProxy(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := AgentProxyString() - - exp := "Vault Agent API Proxy/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultAgentProxyWithProxiedUserAgent tests the AgentProxyStringWithProxiedUserAgent() -// function works as expected -func TestUserAgent_VaultAgentProxyWithProxiedUserAgent(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - userAgent := "my-user-agent" - - act := AgentProxyStringWithProxiedUserAgent(userAgent) - - exp := "Vault Agent API Proxy/1.2.3 (+https://vault-test.com; go5.0); my-user-agent" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultAgentAutoAuth tests the AgentAutoAuthString() function works -// as expected -func TestUserAgent_VaultAgentAutoAuth(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := AgentAutoAuthString() - - exp := "Vault Agent Auto-Auth/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultProxy tests the ProxyString() function works -// as expected -func TestUserAgent_VaultProxy(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := ProxyString() - - exp := "Vault Proxy/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultProxyAPIProxy tests the ProxyAPIProxyString() function works -// as expected -func TestUserAgent_VaultProxyAPIProxy(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := ProxyAPIProxyString() - - exp := "Vault Proxy API Proxy/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultProxyWithProxiedUserAgent tests the ProxyStringWithProxiedUserAgent() -// function works as expected -func TestUserAgent_VaultProxyWithProxiedUserAgent(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - userAgent := "my-user-agent" - - act := ProxyStringWithProxiedUserAgent(userAgent) - - exp := "Vault Proxy 
API Proxy/1.2.3 (+https://vault-test.com; go5.0); my-user-agent" - require.Equal(t, exp, act) -} - -// TestUserAgent_VaultProxyAutoAuth tests the ProxyAPIProxyString() function works -// as expected -func TestUserAgent_VaultProxyAutoAuth(t *testing.T) { - projectURL = "https://vault-test.com" - rt = "go5.0" - versionFunc = func() string { return "1.2.3" } - - act := ProxyAutoAuthString() - - exp := "Vault Proxy Auto-Auth/1.2.3 (+https://vault-test.com; go5.0)" - require.Equal(t, exp, act) + if exp != act { + t.Errorf("expected %q to be %q", act, exp) + } } diff --git a/helper/versions/version.go b/helper/versions/version.go index 9eb8077b892303..b64dd3d260344b 100644 --- a/helper/versions/version.go +++ b/helper/versions/version.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package versions import ( diff --git a/helper/versions/version_test.go b/helper/versions/version_test.go index 85b46cdd3fbf6d..cc1b3e1c20f2f5 100644 --- a/helper/versions/version_test.go +++ b/helper/versions/version_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package versions import "testing" diff --git a/http/assets.go b/http/assets.go index b60a594942c7dd..c401f949108726 100644 --- a/http/assets.go +++ b/http/assets.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build ui package http diff --git a/http/assets_stub.go b/http/assets_stub.go index e1b4daf3991e1f..1989a09d9860c7 100644 --- a/http/assets_stub.go +++ b/http/assets_stub.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !ui package http diff --git a/http/auth_token_test.go b/http/auth_token_test.go index d96e18383ac714..552a32cbdd937d 100644 --- a/http/auth_token_test.go +++ b/http/auth_token_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/cors.go b/http/cors.go index 7e8c311e624f59..74cfeeaef072ed 100644 --- a/http/cors.go +++ b/http/cors.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/custom_header_test.go b/http/custom_header_test.go index 289379a84cb6d8..8c204584aeb781 100644 --- a/http/custom_header_test.go +++ b/http/custom_header_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/events.go b/http/events.go index 072fcd60ea04aa..e5eaa78be9729c 100644 --- a/http/events.go +++ b/http/events.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/events_test.go b/http/events_test.go index b5ce0a1a3581c2..d3debcde6babe0 100644 --- a/http/events_test.go +++ b/http/events_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/forwarded_for_test.go b/http/forwarded_for_test.go index 89bc62acc26534..b7060c6671e4c1 100644 --- a/http/forwarded_for_test.go +++ b/http/forwarded_for_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/forwarding_bench_test.go b/http/forwarding_bench_test.go index 0c3f5e2a286d22..ecc2a3b8fce8a2 100644 --- a/http/forwarding_bench_test.go +++ b/http/forwarding_bench_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
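// Illustrative sketch, not part of the patch: after the Agent- and Proxy-specific
// helpers are dropped from helper/useragent earlier in this diff, String() is the
// remaining way to stamp outbound requests. The URL below is a placeholder.
func newHealthRequest() (*http.Request, error) {
	req, err := http.NewRequest(http.MethodGet, "https://vault.example.com/v1/sys/health", nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", useragent.String())
	return req, nil
}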
-// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/forwarding_test.go b/http/forwarding_test.go index 51cc2c0e01812d..350906563b1888 100644 --- a/http/forwarding_test.go +++ b/http/forwarding_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/handler.go b/http/handler.go index a91a0514ffd0a2..34c4b12cf19ce6 100644 --- a/http/handler.go +++ b/http/handler.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/handler_test.go b/http/handler_test.go index 244fe417725754..49565b41e2356d 100644 --- a/http/handler_test.go +++ b/http/handler_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/help.go b/http/help.go index 64085f1e38b6ba..7ec6fb6131aae1 100644 --- a/http/help.go +++ b/http/help.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/help_test.go b/http/help_test.go index d02c26a9521e93..ec9a67dd1c58c3 100644 --- a/http/help_test.go +++ b/http/help_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/http_test.go b/http/http_test.go index 5e51ce7d0fe313..692aef0d828771 100644 --- a/http/http_test.go +++ b/http/http_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/logical.go b/http/logical.go index 8a681d9107d48d..6b12a26f3bfe6e 100644 --- a/http/logical.go +++ b/http/logical.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( @@ -183,10 +180,8 @@ func buildLogicalRequestNoAuth(perfStandby bool, w http.ResponseWriter, r *http. } data = parseQuery(r.URL.Query()) - case "HEAD": - op = logical.HeaderOperation - data = parseQuery(r.URL.Query()) - case "OPTIONS": + + case "OPTIONS", "HEAD": default: return nil, nil, http.StatusMethodNotAllowed, nil } diff --git a/http/logical_test.go b/http/logical_test.go index a9ccdff0525f16..52c77151ac8ead 100644 --- a/http/logical_test.go +++ b/http/logical_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package http import ( @@ -11,7 +8,6 @@ import ( "io/ioutil" "net/http" "net/http/httptest" - "os" "reflect" "strconv" "strings" @@ -761,180 +757,3 @@ func TestLogical_ErrRelativePath(t *testing.T) { t.Errorf("expected response for write to include %q", logical.ErrRelativePath.Error()) } } - -func testBuiltinPluginMetadataAuditLog(t *testing.T, log map[string]interface{}, expectedMountClass string) { - if mountClass, ok := log["mount_class"].(string); !ok { - t.Fatalf("mount_class should be a string, not %T", log["mount_class"]) - } else if mountClass != expectedMountClass { - t.Fatalf("bad: mount_class should be %s, not %s", expectedMountClass, mountClass) - } - - if _, ok := log["mount_running_version"].(string); !ok { - t.Fatalf("mount_running_version should be a string, not %T", log["mount_running_version"]) - } - - if _, ok := log["mount_running_sha256"].(string); ok { - t.Fatalf("mount_running_sha256 should be nil, not %T", log["mount_running_sha256"]) - } - - if mountIsExternalPlugin, ok := log["mount_is_external_plugin"].(bool); ok && mountIsExternalPlugin { - t.Fatalf("mount_is_external_plugin should be nil or false, not %T", log["mount_is_external_plugin"]) - } -} - -// TestLogical_AuditEnabled_ShouldLogPluginMetadata_Auth tests that we have plugin metadata of a builtin auth plugin -// in audit log when it is enabled -func TestLogical_AuditEnabled_ShouldLogPluginMetadata_Auth(t *testing.T) { - coreConfig := &vault.CoreConfig{ - AuditBackends: map[string]audit.Factory{ - "file": auditFile.Factory, - }, - } - - cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: Handler, - }) - - cluster.Start() - defer cluster.Cleanup() - - cores := cluster.Cores - - core := cores[0].Core - c := cluster.Cores[0].Client - vault.TestWaitActive(t, core) - - // Enable the audit backend - tempDir := t.TempDir() - auditLogFile, err := os.CreateTemp(tempDir, "") - if err != nil { - t.Fatal(err) - } - - err = c.Sys().EnableAuditWithOptions("file", &api.EnableAuditOptions{ - Type: "file", - Options: map[string]string{ - "file_path": auditLogFile.Name(), - }, - }) - if err != nil { - t.Fatal(err) - } - - _, err = c.Logical().Write("auth/token/create", map[string]interface{}{ - "ttl": "10s", - }) - if err != nil { - t.Fatal(err) - } - - // Check the audit trail on request and response - decoder := json.NewDecoder(auditLogFile) - var auditRecord map[string]interface{} - for decoder.Decode(&auditRecord) == nil { - auditRequest := map[string]interface{}{} - if req, ok := auditRecord["request"]; ok { - auditRequest = req.(map[string]interface{}) - if auditRequest["path"] != "auth/token/create" { - continue - } - } - testBuiltinPluginMetadataAuditLog(t, auditRequest, consts.PluginTypeCredential.String()) - - auditResponse := map[string]interface{}{} - if req, ok := auditRecord["response"]; ok { - auditRequest = req.(map[string]interface{}) - if auditResponse["path"] != "auth/token/create" { - continue - } - } - testBuiltinPluginMetadataAuditLog(t, auditResponse, consts.PluginTypeCredential.String()) - } -} - -// TestLogical_AuditEnabled_ShouldLogPluginMetadata_Secret tests that we have plugin metadata of a builtin secret plugin -// in audit log when it is enabled -func TestLogical_AuditEnabled_ShouldLogPluginMetadata_Secret(t *testing.T) { - coreConfig := &vault.CoreConfig{ - LogicalBackends: map[string]logical.Factory{ - "kv": kv.VersionedKVFactory, - }, - AuditBackends: map[string]audit.Factory{ - "file": auditFile.Factory, - }, - } - - 
cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ - HandlerFunc: Handler, - }) - - cluster.Start() - defer cluster.Cleanup() - - cores := cluster.Cores - - core := cores[0].Core - c := cluster.Cores[0].Client - vault.TestWaitActive(t, core) - - if err := c.Sys().Mount("kv/", &api.MountInput{ - Type: "kv-v2", - }); err != nil { - t.Fatalf("kv-v2 mount attempt failed - err: %#v\n", err) - } - - // Enable the audit backend - tempDir := t.TempDir() - auditLogFile, err := os.CreateTemp(tempDir, "") - if err != nil { - t.Fatal(err) - } - - err = c.Sys().EnableAuditWithOptions("file", &api.EnableAuditOptions{ - Type: "file", - Options: map[string]string{ - "file_path": auditLogFile.Name(), - }, - }) - if err != nil { - t.Fatal(err) - } - - { - writeData := map[string]interface{}{ - "data": map[string]interface{}{ - "bar": "a", - }, - } - corehelpers.RetryUntil(t, 10*time.Second, func() error { - resp, err := c.Logical().Write("kv/data/foo", writeData) - if err != nil { - t.Fatalf("write request failed, err: %#v, resp: %#v\n", err, resp) - } - return nil - }) - } - - // Check the audit trail on request and response - decoder := json.NewDecoder(auditLogFile) - var auditRecord map[string]interface{} - for decoder.Decode(&auditRecord) == nil { - auditRequest := map[string]interface{}{} - if req, ok := auditRecord["request"]; ok { - auditRequest = req.(map[string]interface{}) - if auditRequest["path"] != "kv/data/foo" { - continue - } - } - testBuiltinPluginMetadataAuditLog(t, auditRequest, consts.PluginTypeSecrets.String()) - - auditResponse := map[string]interface{}{} - if req, ok := auditRecord["response"]; ok { - auditRequest = req.(map[string]interface{}) - if auditResponse["path"] != "kv/data/foo" { - continue - } - } - testBuiltinPluginMetadataAuditLog(t, auditResponse, consts.PluginTypeSecrets.String()) - } -} diff --git a/http/plugin_test.go b/http/plugin_test.go index b0d85be6d17340..164a3d25f664d0 100644 --- a/http/plugin_test.go +++ b/http/plugin_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_audit_test.go b/http/sys_audit_test.go index 2ec4ffc30c51b3..58873bfb12aa16 100644 --- a/http/sys_audit_test.go +++ b/http/sys_audit_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_auth_test.go b/http/sys_auth_test.go index 3bd0a009dda84a..0071ab030cd839 100644 --- a/http/sys_auth_test.go +++ b/http/sys_auth_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_config_cors_test.go b/http/sys_config_cors_test.go index 2f4a29a49e0668..3ad0e810a2c5a5 100644 --- a/http/sys_config_cors_test.go +++ b/http/sys_config_cors_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_config_state_test.go b/http/sys_config_state_test.go index c8f6c402bddb8f..5b5d9aac0dfa9c 100644 --- a/http/sys_config_state_test.go +++ b/http/sys_config_state_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_feature_flags.go b/http/sys_feature_flags.go index 9f654b7febda6f..11ece32795b779 100644 --- a/http/sys_feature_flags.go +++ b/http/sys_feature_flags.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_generate_root.go b/http/sys_generate_root.go index 7f953e4d449a74..db2da6f7f3b17f 100644 --- a/http/sys_generate_root.go +++ b/http/sys_generate_root.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_generate_root_test.go b/http/sys_generate_root_test.go index dbd7796315a6c1..36e25f82114ac0 100644 --- a/http/sys_generate_root_test.go +++ b/http/sys_generate_root_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_health.go b/http/sys_health.go index b3f29d4dd5e68e..a1f2bb8bd34798 100644 --- a/http/sys_health.go +++ b/http/sys_health.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_health_test.go b/http/sys_health_test.go index 9761ec16c98ca3..68ef11b9e2f571 100644 --- a/http/sys_health_test.go +++ b/http/sys_health_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_hostinfo_test.go b/http/sys_hostinfo_test.go index 756841e724f736..af313a382b2b29 100644 --- a/http/sys_hostinfo_test.go +++ b/http/sys_hostinfo_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_in_flight_requests.go b/http/sys_in_flight_requests.go index bdf3ebaf9d20ac..b38156f38ebc4a 100644 --- a/http/sys_in_flight_requests.go +++ b/http/sys_in_flight_requests.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_in_flight_requests_test.go b/http/sys_in_flight_requests_test.go index 93c92c53983557..de64d708c68f2d 100644 --- a/http/sys_in_flight_requests_test.go +++ b/http/sys_in_flight_requests_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_init.go b/http/sys_init.go index 905916bef6283a..ae3059462bef42 100644 --- a/http/sys_init.go +++ b/http/sys_init.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_init_test.go b/http/sys_init_test.go index 79dd275824bd14..4953c4244ce822 100644 --- a/http/sys_init_test.go +++ b/http/sys_init_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( @@ -153,11 +150,12 @@ func TestSysInit_Put_ValidateParams(t *testing.T) { } func TestSysInit_Put_ValidateParams_AutoUnseal(t *testing.T) { - testSeal, _ := seal.NewTestSeal(&seal.TestSealOpts{Name: "transit"}) + testSeal := seal.NewTestSeal(nil) autoSeal, err := vault.NewAutoSeal(testSeal) if err != nil { t.Fatal(err) } + autoSeal.SetType("transit") // Create the transit server. conf := &vault.CoreConfig{ diff --git a/http/sys_internal_test.go b/http/sys_internal_test.go index 11d9376248c567..d3c066f70c847b 100644 --- a/http/sys_internal_test.go +++ b/http/sys_internal_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_leader.go b/http/sys_leader.go index 6b39c4401af3fb..8c2ce21e5001d4 100644 --- a/http/sys_leader.go +++ b/http/sys_leader.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_leader_test.go b/http/sys_leader_test.go index 3292b7f2407bc8..974b3a7b7e40f6 100644 --- a/http/sys_leader_test.go +++ b/http/sys_leader_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_lease_test.go b/http/sys_lease_test.go index 6b069ca37cedbe..a254be71c211cf 100644 --- a/http/sys_lease_test.go +++ b/http/sys_lease_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_metrics.go b/http/sys_metrics.go index 2bb819b34fd811..012417282e5f59 100644 --- a/http/sys_metrics.go +++ b/http/sys_metrics.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_metrics_test.go b/http/sys_metrics_test.go index 167347b4f70063..e8770a1bfac840 100644 --- a/http/sys_metrics_test.go +++ b/http/sys_metrics_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_monitor_test.go b/http/sys_monitor_test.go index 5d428c419a9cd2..b5b7f3b981f869 100644 --- a/http/sys_monitor_test.go +++ b/http/sys_monitor_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_mount_test.go b/http/sys_mount_test.go index 384f5bf810bf93..5675cce5e30555 100644 --- a/http/sys_mount_test.go +++ b/http/sys_mount_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_mounts_test.go b/http/sys_mounts_test.go index 5f2218514ec074..7c113d9879509b 100644 --- a/http/sys_mounts_test.go +++ b/http/sys_mounts_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_policy_test.go b/http/sys_policy_test.go index 1ab1e85bb7acc8..6844a5321d6a6f 100644 --- a/http/sys_policy_test.go +++ b/http/sys_policy_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_raft.go b/http/sys_raft.go index 1e00ebe5d90cdd..428aad4f7da3a3 100644 --- a/http/sys_raft.go +++ b/http/sys_raft.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_rekey.go b/http/sys_rekey.go index c05dc839765310..d1cec653a6283f 100644 --- a/http/sys_rekey.go +++ b/http/sys_rekey.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_rekey_test.go b/http/sys_rekey_test.go index eaef4dd7a1f3be..fd068ba48bda14 100644 --- a/http/sys_rekey_test.go +++ b/http/sys_rekey_test.go @@ -1,57 +1,46 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package http import ( "encoding/hex" "encoding/json" "fmt" + "net/http" "reflect" "testing" "github.com/go-test/deep" - "github.com/hashicorp/vault/sdk/helper/testhelpers/schema" "github.com/hashicorp/vault/vault" ) // Test to check if the API errors out when wrong number of PGP keys are // supplied for rekey func TestSysRekey_Init_pgpKeysEntriesForRekey(t *testing.T) { - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - HandlerFunc: Handler, - RequestResponseCallback: schema.ResponseValidatingCallback(t), - }) - cluster.Start() - defer cluster.Cleanup() - cl := cluster.Cores[0].Client + core, _, token := vault.TestCoreUnsealed(t) + ln, addr := TestServer(t, core) + defer ln.Close() + TestServerAuth(t, addr, token) - _, err := cl.Logical().Write("sys/rekey/init", map[string]interface{}{ + resp := testHttpPut(t, token, addr+"/v1/sys/rekey/init", map[string]interface{}{ "secret_shares": 5, "secret_threshold": 3, "pgp_keys": []string{"pgpkey1"}, }) - if err == nil { - t.Fatal("should have failed to write pgp key entry due to mismatched keys", err) - } + testResponseStatus(t, resp, 400) } func TestSysRekey_Init_Status(t *testing.T) { t.Run("status-barrier-default", func(t *testing.T) { - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - HandlerFunc: Handler, - RequestResponseCallback: schema.ResponseValidatingCallback(t), - }) - cluster.Start() - defer cluster.Cleanup() - cl := cluster.Cores[0].Client + core, _, token := vault.TestCoreUnsealed(t) + ln, addr := TestServer(t, core) + defer ln.Close() + TestServerAuth(t, addr, token) - resp, err := cl.Logical().Read("sys/rekey/init") + resp, err := http.Get(addr + "/v1/sys/rekey/init") if err != nil { t.Fatalf("err: %s", err) } - actual := resp.Data + var actual map[string]interface{} expected := map[string]interface{}{ "started": false, "t": json.Number("0"), @@ -63,7 +52,8 @@ func TestSysRekey_Init_Status(t *testing.T) { "nonce": "", "verification_required": false, } - + testResponseStatus(t, resp, 200) + testResponseBody(t, resp, &actual) if !reflect.DeepEqual(actual, expected) { t.Fatalf("\nexpected: %#v\nactual: %#v", expected, actual) } @@ -72,24 +62,19 @@ func TestSysRekey_Init_Status(t *testing.T) { func TestSysRekey_Init_Setup(t *testing.T) { t.Run("init-barrier-barrier-key", func(t *testing.T) { - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - HandlerFunc: Handler, - RequestResponseCallback: schema.ResponseValidatingCallback(t), - }) - cluster.Start() - defer cluster.Cleanup() - cl := cluster.Cores[0].Client + core, _, token := vault.TestCoreUnsealed(t) + ln, addr := TestServer(t, core) + defer ln.Close() + TestServerAuth(t, addr, token) // Start rekey - resp, err := cl.Logical().Write("sys/rekey/init", map[string]interface{}{ + resp := testHttpPut(t, token, addr+"/v1/sys/rekey/init", map[string]interface{}{ "secret_shares": 5, "secret_threshold": 3, }) - if err != nil { - t.Fatalf("err: %s", err) - } + testResponseStatus(t, resp, 200) - actual := resp.Data + var actual map[string]interface{} expected := map[string]interface{}{ "started": true, "t": json.Number("3"), @@ -100,7 +85,8 @@ func TestSysRekey_Init_Setup(t *testing.T) { "backup": false, "verification_required": false, } - + testResponseStatus(t, resp, 200) + testResponseBody(t, resp, &actual) if actual["nonce"].(string) == "" { t.Fatalf("nonce was empty") } @@ -110,12 +96,9 @@ func TestSysRekey_Init_Setup(t *testing.T) { } // Get rekey status - resp, err = 
cl.Logical().Read("sys/rekey/init") - if err != nil { - t.Fatalf("err: %s", err) - } + resp = testHttpGet(t, token, addr+"/v1/sys/rekey/init") - actual = resp.Data + actual = map[string]interface{}{} expected = map[string]interface{}{ "started": true, "t": json.Number("3"), @@ -126,6 +109,8 @@ func TestSysRekey_Init_Setup(t *testing.T) { "backup": false, "verification_required": false, } + testResponseStatus(t, resp, 200) + testResponseBody(t, resp, &actual) if actual["nonce"].(string) == "" { t.Fatalf("nonce was empty") } @@ -141,33 +126,26 @@ func TestSysRekey_Init_Setup(t *testing.T) { func TestSysRekey_Init_Cancel(t *testing.T) { t.Run("cancel-barrier-barrier-key", func(t *testing.T) { - cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{ - HandlerFunc: Handler, - RequestResponseCallback: schema.ResponseValidatingCallback(t), - }) - cluster.Start() - defer cluster.Cleanup() - cl := cluster.Cores[0].Client + core, _, token := vault.TestCoreUnsealed(t) + ln, addr := TestServer(t, core) + defer ln.Close() + TestServerAuth(t, addr, token) - _, err := cl.Logical().Write("sys/rekey/init", map[string]interface{}{ + resp := testHttpPut(t, token, addr+"/v1/sys/rekey/init", map[string]interface{}{ "secret_shares": 5, "secret_threshold": 3, }) - if err != nil { - t.Fatalf("err: %s", err) - } + testResponseStatus(t, resp, 200) - _, err = cl.Logical().Delete("sys/rekey/init") - if err != nil { - t.Fatalf("err: %s", err) - } + resp = testHttpDelete(t, token, addr+"/v1/sys/rekey/init") + testResponseStatus(t, resp, 204) - resp, err := cl.Logical().Read("sys/rekey/init") + resp, err := http.Get(addr + "/v1/sys/rekey/init") if err != nil { t.Fatalf("err: %s", err) } - actual := resp.Data + var actual map[string]interface{} expected := map[string]interface{}{ "started": false, "t": json.Number("0"), @@ -179,6 +157,8 @@ func TestSysRekey_Init_Cancel(t *testing.T) { "nonce": "", "verification_required": false, } + testResponseStatus(t, resp, 200) + testResponseBody(t, resp, &actual) if !reflect.DeepEqual(actual, expected) { t.Fatalf("\nexpected: %#v\nactual: %#v", expected, actual) } diff --git a/http/sys_rotate_test.go b/http/sys_rotate_test.go index dfc28a257c5590..81597c7008e012 100644 --- a/http/sys_rotate_test.go +++ b/http/sys_rotate_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_seal.go b/http/sys_seal.go index 5d32828e70f9df..24f491b65d1d6e 100644 --- a/http/sys_seal.go +++ b/http/sys_seal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/sys_seal_test.go b/http/sys_seal_test.go index ef5922d52220fe..26796d02cc72f9 100644 --- a/http/sys_seal_test.go +++ b/http/sys_seal_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
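// Illustrative sketch, not part of the patch: the reverted rekey tests above use
// the package's raw HTTP test helpers rather than an API client; the general
// pattern is the one below (endpoint and assertions are placeholders).
func TestSysRekeyInit_Pattern(t *testing.T) {
	core, _, token := vault.TestCoreUnsealed(t)
	ln, addr := TestServer(t, core)
	defer ln.Close()
	TestServerAuth(t, addr, token)

	// Start a rekey, then read its status back.
	resp := testHttpPut(t, token, addr+"/v1/sys/rekey/init", map[string]interface{}{
		"secret_shares":    5,
		"secret_threshold": 3,
	})
	testResponseStatus(t, resp, 200)

	resp = testHttpGet(t, token, addr+"/v1/sys/rekey/init")
	testResponseStatus(t, resp, 200)

	var actual map[string]interface{}
	testResponseBody(t, resp, &actual)
	if actual["started"] != true {
		t.Fatalf("expected rekey to be started, got: %#v", actual)
	}
}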
-// SPDX-License-Identifier: MPL-2.0 - package http import ( @@ -67,6 +64,80 @@ func TestSysSealStatus(t *testing.T) { } } +func TestSysSealStatus_Warnings(t *testing.T) { + core := vault.TestCore(t) + vault.TestCoreInit(t, core) + ln, addr := TestServer(t, core) + defer ln.Close() + + // Manually configure DisableSSCTokens to be true + core.GetCoreConfigInternal().DisableSSCTokens = true + + resp, err := http.Get(addr + "/v1/sys/seal-status") + if err != nil { + t.Fatalf("err: %s", err) + } + + var actual map[string]interface{} + expected := map[string]interface{}{ + "sealed": true, + "t": json.Number("3"), + "n": json.Number("3"), + "progress": json.Number("0"), + "nonce": "", + "type": "shamir", + "recovery_seal": false, + "initialized": true, + "migration": false, + "build_date": version.BuildDate, + } + testResponseStatus(t, resp, 200) + testResponseBody(t, resp, &actual) + if actual["version"] == nil { + t.Fatalf("expected version information") + } + expected["version"] = actual["version"] + if actual["cluster_name"] == nil { + delete(expected, "cluster_name") + } else { + expected["cluster_name"] = actual["cluster_name"] + } + if actual["cluster_id"] == nil { + delete(expected, "cluster_id") + } else { + expected["cluster_id"] = actual["cluster_id"] + } + actualWarnings := actual["warnings"] + if actualWarnings == nil { + t.Fatalf("expected warnings about SSCToken disabling") + } + + actualWarningsArray, ok := actualWarnings.([]interface{}) + if !ok { + t.Fatalf("expected warnings about SSCToken disabling were not in the right format") + } + if len(actualWarningsArray) != 1 { + t.Fatalf("too many warnings were given") + } + actualWarning, ok := actualWarningsArray[0].(string) + if !ok { + t.Fatalf("expected warning about SSCToken disabling was not in the right format") + } + + expectedWarning := "Server Side Consistent Tokens are disabled, due to the " + + "VAULT_DISABLE_SERVER_SIDE_CONSISTENT_TOKENS environment variable being set. " + + "It is not recommended to run Vault for an extended period of time with this configuration." + if actualWarning != expectedWarning { + t.Fatalf("actual warning was not as expected. Expected %s, but got %s", expectedWarning, actualWarning) + } + + expected["warnings"] = actual["warnings"] + + if diff := deep.Equal(actual, expected); diff != nil { + t.Fatal(diff) + } +} + func TestSysSealStatus_uninit(t *testing.T) { core := vault.TestCore(t) ln, addr := TestServer(t, core) diff --git a/http/sys_wrapping_test.go b/http/sys_wrapping_test.go index c991bd23048005..4a26c44fb868d4 100644 --- a/http/sys_wrapping_test.go +++ b/http/sys_wrapping_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/testing.go b/http/testing.go index 95153991e7667a..9bb3970a6c32da 100644 --- a/http/testing.go +++ b/http/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/unwrapping_raw_body_test.go b/http/unwrapping_raw_body_test.go index e1ad0df9c297a7..6ba24b7c9098dd 100644 --- a/http/unwrapping_raw_body_test.go +++ b/http/unwrapping_raw_body_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/http/util.go b/http/util.go index f714efa54f3bb3..b4c8923cc3eea2 100644 --- a/http/util.go +++ b/http/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package http import ( diff --git a/internal/go118_sha1_patch.go b/internal/go118_sha1_patch.go index fc2ccf238266ec..c7f94844ef3c32 100644 --- a/internal/go118_sha1_patch.go +++ b/internal/go118_sha1_patch.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package internal import ( diff --git a/internalshared/configutil/config.go b/internalshared/configutil/config.go index 99777229f0d49e..f4c2ec11ad3497 100644 --- a/internalshared/configutil/config.go +++ b/internalshared/configutil/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/config_util.go b/internalshared/configutil/config_util.go index 3fd4bb9487130b..05cb061e7e9b65 100644 --- a/internalshared/configutil/config_util.go +++ b/internalshared/configutil/config_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package configutil diff --git a/internalshared/configutil/encrypt_decrypt.go b/internalshared/configutil/encrypt_decrypt.go index f0e5fcc04270b2..1e9f830901c446 100644 --- a/internalshared/configutil/encrypt_decrypt.go +++ b/internalshared/configutil/encrypt_decrypt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/encrypt_decrypt_test.go b/internalshared/configutil/encrypt_decrypt_test.go index 19bf6858338b87..b9257bb6c2b2de 100644 --- a/internalshared/configutil/encrypt_decrypt_test.go +++ b/internalshared/configutil/encrypt_decrypt_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/hcp_link.go b/internalshared/configutil/hcp_link.go index fd8d6b6ca8539c..a46c3bb1f55309 100644 --- a/internalshared/configutil/hcp_link.go +++ b/internalshared/configutil/hcp_link.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/http_response_headers.go b/internalshared/configutil/http_response_headers.go index b808f9e6522c9f..2db3034e588b69 100644 --- a/internalshared/configutil/http_response_headers.go +++ b/internalshared/configutil/http_response_headers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/kms.go b/internalshared/configutil/kms.go index 025018124984d0..614a6ec8e5712a 100644 --- a/internalshared/configutil/kms.go +++ b/internalshared/configutil/kms.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/lint.go b/internalshared/configutil/lint.go index 24b968e6b96b04..2b5b634156b3cf 100644 --- a/internalshared/configutil/lint.go +++ b/internalshared/configutil/lint.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/listener.go b/internalshared/configutil/listener.go index 5e9373a169c777..ea28dbf199eb70 100644 --- a/internalshared/configutil/listener.go +++ b/internalshared/configutil/listener.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package configutil import ( @@ -100,8 +97,6 @@ type Listener struct { AgentAPI *AgentAPI `hcl:"agent_api"` - ProxyAPI *ProxyAPI `hcl:"proxy_api"` - Telemetry ListenerTelemetry `hcl:"telemetry"` Profiling ListenerProfiling `hcl:"profiling"` InFlightRequestLogging ListenerInFlightRequestLogging `hcl:"inflight_requests_logging"` @@ -125,11 +120,6 @@ type AgentAPI struct { EnableQuit bool `hcl:"enable_quit"` } -// ProxyAPI allows users to select which parts of the Vault Proxy API they want enabled. -type ProxyAPI struct { - EnableQuit bool `hcl:"enable_quit"` -} - func (l *Listener) GoString() string { return fmt.Sprintf("*%#v", *l) } diff --git a/internalshared/configutil/listener_test.go b/internalshared/configutil/listener_test.go index da7d76596b6fe1..803086e483e237 100644 --- a/internalshared/configutil/listener_test.go +++ b/internalshared/configutil/listener_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/merge.go b/internalshared/configutil/merge.go index 940e8bfcfb2cc9..4bc30e62d8290c 100644 --- a/internalshared/configutil/merge.go +++ b/internalshared/configutil/merge.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil func (c *SharedConfig) Merge(c2 *SharedConfig) *SharedConfig { diff --git a/internalshared/configutil/telemetry.go b/internalshared/configutil/telemetry.go index 270eb493d5ee78..77620770db3e60 100644 --- a/internalshared/configutil/telemetry.go +++ b/internalshared/configutil/telemetry.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/telemetry_test.go b/internalshared/configutil/telemetry_test.go index aaeb808171ab78..dda74711dcb554 100644 --- a/internalshared/configutil/telemetry_test.go +++ b/internalshared/configutil/telemetry_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/userlockout.go b/internalshared/configutil/userlockout.go index df76308ddb5ab5..ccf51b23b26476 100644 --- a/internalshared/configutil/userlockout.go +++ b/internalshared/configutil/userlockout.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/configutil/userlockout_test.go b/internalshared/configutil/userlockout_test.go index db05441c684d43..d5ab42cbe86a94 100644 --- a/internalshared/configutil/userlockout_test.go +++ b/internalshared/configutil/userlockout_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package configutil import ( diff --git a/internalshared/listenerutil/bufconn.go b/internalshared/listenerutil/bufconn.go index 54af0a783e20df..d3d9d653c576d6 100644 --- a/internalshared/listenerutil/bufconn.go +++ b/internalshared/listenerutil/bufconn.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package listenerutil import ( diff --git a/internalshared/listenerutil/listener.go b/internalshared/listenerutil/listener.go index 9a4edb45dcdcea..6095713be5d243 100644 --- a/internalshared/listenerutil/listener.go +++ b/internalshared/listenerutil/listener.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package listenerutil import ( diff --git a/internalshared/listenerutil/listener_test.go b/internalshared/listenerutil/listener_test.go index 6219727e190753..3c2afa593aed53 100644 --- a/internalshared/listenerutil/listener_test.go +++ b/internalshared/listenerutil/listener_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package listenerutil import ( diff --git a/main.go b/main.go index 0417bd98773bed..bc8a8651f8f9ce 100644 --- a/main.go +++ b/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main // import "github.com/hashicorp/vault" import ( diff --git a/main_test.go b/main_test.go index 36398339dfcf2f..4c4c79a2cb8e28 100644 --- a/main_test.go +++ b/main_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main // import "github.com/hashicorp/vault" // This file is intentionally empty to force early versions of Go diff --git a/physical/aerospike/aerospike.go b/physical/aerospike/aerospike.go index 81aab224ac24ec..b323ccd344d408 100644 --- a/physical/aerospike/aerospike.go +++ b/physical/aerospike/aerospike.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aerospike import ( diff --git a/physical/aerospike/aerospike_test.go b/physical/aerospike/aerospike_test.go index 6324b5e6d28b8e..83d1be5599a478 100644 --- a/physical/aerospike/aerospike_test.go +++ b/physical/aerospike/aerospike_test.go @@ -1,19 +1,14 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package aerospike import ( "context" "math/bits" - "runtime" - "strings" "testing" "time" aero "github.com/aerospike/aerospike-client-go/v5" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/physical" ) @@ -49,11 +44,6 @@ type aerospikeConfig struct { } func prepareAerospikeContainer(t *testing.T) (func(), *aerospikeConfig) { - // Skipping on ARM, as this image can't run on ARM architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - runner, err := docker.NewServiceRunner(docker.RunOptions{ ImageRepo: "docker.mirror.hashicorp.services/aerospike/aerospike-server", ContainerName: "aerospikedb", diff --git a/physical/alicloudoss/alicloudoss.go b/physical/alicloudoss/alicloudoss.go index d32d14b3b00ac1..40f3da6d5643c3 100644 --- a/physical/alicloudoss/alicloudoss.go +++ b/physical/alicloudoss/alicloudoss.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package alicloudoss import ( diff --git a/physical/alicloudoss/alicloudoss_test.go b/physical/alicloudoss/alicloudoss_test.go index 1b098bd3440519..ad292da4f6b7b2 100644 --- a/physical/alicloudoss/alicloudoss_test.go +++ b/physical/alicloudoss/alicloudoss_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package alicloudoss import ( diff --git a/physical/azure/azure.go b/physical/azure/azure.go index fe884491dbf5f1..eb158a993d8a22 100644 --- a/physical/azure/azure.go +++ b/physical/azure/azure.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package azure import ( diff --git a/physical/azure/azure_test.go b/physical/azure/azure_test.go index a004c83357637c..20392a21c688b3 100644 --- a/physical/azure/azure_test.go +++ b/physical/azure/azure_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package azure import ( diff --git a/physical/cassandra/cassandra.go b/physical/cassandra/cassandra.go index fc9261a5502764..84c2ab149db237 100644 --- a/physical/cassandra/cassandra.go +++ b/physical/cassandra/cassandra.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/physical/cassandra/cassandra_test.go b/physical/cassandra/cassandra_test.go index 9466d0a7084527..e9fe7bc059a5b6 100644 --- a/physical/cassandra/cassandra_test.go +++ b/physical/cassandra/cassandra_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/physical/cockroachdb/cockroachdb.go b/physical/cockroachdb/cockroachdb.go index 38f935cdebd5c9..385074d917cbdf 100644 --- a/physical/cockroachdb/cockroachdb.go +++ b/physical/cockroachdb/cockroachdb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cockroachdb import ( diff --git a/physical/cockroachdb/cockroachdb_ha.go b/physical/cockroachdb/cockroachdb_ha.go index 39b617546a87d6..1f22465d0810d3 100644 --- a/physical/cockroachdb/cockroachdb_ha.go +++ b/physical/cockroachdb/cockroachdb_ha.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cockroachdb import ( diff --git a/physical/cockroachdb/cockroachdb_test.go b/physical/cockroachdb/cockroachdb_test.go index ab652554ae3800..70abfda98293c3 100644 --- a/physical/cockroachdb/cockroachdb_test.go +++ b/physical/cockroachdb/cockroachdb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cockroachdb import ( @@ -9,12 +6,10 @@ import ( "fmt" "net/url" "os" - "runtime" - "strings" "testing" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/physical" ) @@ -28,11 +23,6 @@ type Config struct { var _ docker.ServiceConfig = &Config{} func prepareCockroachDBTestContainer(t *testing.T) (func(), *Config) { - // Skipping, as this image can't run on arm architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as CockroachDB 1.0 is not supported on ARM architectures") - } - if retURL := os.Getenv("CR_URL"); retURL != "" { s, err := docker.NewServiceURLParse(retURL) if err != nil { diff --git a/physical/cockroachdb/keywords.go b/physical/cockroachdb/keywords.go index f44089f9f76f70..390dc63f8dfca2 100644 --- a/physical/cockroachdb/keywords.go +++ b/physical/cockroachdb/keywords.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cockroachdb // sqlKeywords is a reference of all of the keywords that we do not allow for use as the table name diff --git a/physical/consul/consul.go b/physical/consul/consul.go index b17dbc4c169304..d840b22a36c850 100644 --- a/physical/consul/consul.go +++ b/physical/consul/consul.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/physical/consul/consul_test.go b/physical/consul/consul_test.go index b0a16ce85a37f8..31b01300e16fd2 100644 --- a/physical/consul/consul_test.go +++ b/physical/consul/consul_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/physical/consul/helpers.go b/physical/consul/helpers.go index ce7c47fdcc9d90..71c30b310068ee 100644 --- a/physical/consul/helpers.go +++ b/physical/consul/helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/physical/couchdb/couchdb.go b/physical/couchdb/couchdb.go index 9c54b0494ae79d..86fc139ed92d05 100644 --- a/physical/couchdb/couchdb.go +++ b/physical/couchdb/couchdb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package couchdb import ( diff --git a/physical/couchdb/couchdb_test.go b/physical/couchdb/couchdb_test.go index d7ae4be27766e8..abf11b7c1aae5c 100644 --- a/physical/couchdb/couchdb_test.go +++ b/physical/couchdb/couchdb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package couchdb import ( @@ -10,13 +7,12 @@ import ( "net/http" "net/url" "os" - "runtime" "strings" "testing" "time" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/physical" ) @@ -79,13 +75,6 @@ func (c couchDB) URL() *url.URL { var _ docker.ServiceConfig = &couchDB{} func prepareCouchdbDBTestContainer(t *testing.T) (func(), *couchDB) { - // ARM64 is only supported on CouchDB 2 and above. If we update - // our image and support to 2 and above, we can unskip these: - // https://hub.docker.com/r/arm64v8/couchdb/ - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as CouchDB 1.6 is not supported on ARM architectures") - } - // If environment variable is set, assume caller wants to target a real // DynamoDB. if os.Getenv("COUCHDB_ENDPOINT") != "" { diff --git a/physical/dynamodb/dynamodb.go b/physical/dynamodb/dynamodb.go index 591c65cf710a6a..18c2bbf50a4d1a 100644 --- a/physical/dynamodb/dynamodb.go +++ b/physical/dynamodb/dynamodb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dynamodb import ( diff --git a/physical/dynamodb/dynamodb_test.go b/physical/dynamodb/dynamodb_test.go index c4d25b9d7cb61e..1058a6e210262f 100644 --- a/physical/dynamodb/dynamodb_test.go +++ b/physical/dynamodb/dynamodb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package dynamodb import ( @@ -10,14 +7,12 @@ import ( "net/http" "net/url" "os" - "runtime" - "strings" "testing" "time" "github.com/go-test/deep" log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/sdk/helper/docker" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/sdk/helper/logging" "github.com/hashicorp/vault/sdk/physical" @@ -375,11 +370,6 @@ type Config struct { var _ docker.ServiceConfig = &Config{} func prepareDynamoDBTestContainer(t *testing.T) (func(), *Config) { - // Skipping on ARM, as this image can't run on ARM architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - // If environment variable is set, assume caller wants to target a real // DynamoDB. if endpoint := os.Getenv("AWS_DYNAMODB_ENDPOINT"); endpoint != "" { diff --git a/physical/etcd/etcd.go b/physical/etcd/etcd.go index f17a552b54c9aa..5bb8d4a31c1b8f 100644 --- a/physical/etcd/etcd.go +++ b/physical/etcd/etcd.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package etcd import ( diff --git a/physical/etcd/etcd3.go b/physical/etcd/etcd3.go index 57a838a6974310..486d448febf7a3 100644 --- a/physical/etcd/etcd3.go +++ b/physical/etcd/etcd3.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package etcd import ( diff --git a/physical/etcd/etcd3_test.go b/physical/etcd/etcd3_test.go index a2de6314dd6f37..71150a698c637e 100644 --- a/physical/etcd/etcd3_test.go +++ b/physical/etcd/etcd3_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package etcd import ( diff --git a/physical/foundationdb/fdb-go-install.sh b/physical/foundationdb/fdb-go-install.sh index 4b2c125223144c..550d5cf4d14e73 100755 --- a/physical/foundationdb/fdb-go-install.sh +++ b/physical/foundationdb/fdb-go-install.sh @@ -1,7 +1,4 @@ #!/bin/bash -eu -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # # fdb-go-install.sh # diff --git a/physical/foundationdb/foundationdb.go b/physical/foundationdb/foundationdb.go index 03f984f560392c..56305b2fbf7df9 100644 --- a/physical/foundationdb/foundationdb.go +++ b/physical/foundationdb/foundationdb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build foundationdb package foundationdb diff --git a/physical/foundationdb/foundationdb_test.go b/physical/foundationdb/foundationdb_test.go index ecd6aa8234e7e6..c6fe75d5ebfd26 100644 --- a/physical/foundationdb/foundationdb_test.go +++ b/physical/foundationdb/foundationdb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build foundationdb package foundationdb diff --git a/physical/foundationdb/foundationdbstub.go b/physical/foundationdb/foundationdbstub.go index 283ca0969f0f57..4fc2734e50b1f6 100644 --- a/physical/foundationdb/foundationdbstub.go +++ b/physical/foundationdb/foundationdbstub.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !foundationdb package foundationdb diff --git a/physical/gcs/gcs.go b/physical/gcs/gcs.go index 4a3f5bdf4961a2..b5d1f6b9ff42ce 100644 --- a/physical/gcs/gcs.go +++ b/physical/gcs/gcs.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package gcs import ( diff --git a/physical/gcs/gcs_ha.go b/physical/gcs/gcs_ha.go index 2e4e762a7469a4..3a8e45d9819056 100644 --- a/physical/gcs/gcs_ha.go +++ b/physical/gcs/gcs_ha.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package gcs import ( diff --git a/physical/gcs/gcs_ha_test.go b/physical/gcs/gcs_ha_test.go index cdd59e731da9fb..8e1b91e777939f 100644 --- a/physical/gcs/gcs_ha_test.go +++ b/physical/gcs/gcs_ha_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package gcs import ( diff --git a/physical/gcs/gcs_test.go b/physical/gcs/gcs_test.go index 332ba35d79ba59..4caab730faa724 100644 --- a/physical/gcs/gcs_test.go +++ b/physical/gcs/gcs_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package gcs import ( diff --git a/physical/manta/manta.go b/physical/manta/manta.go index cfb0770144abbe..390683d3695fca 100644 --- a/physical/manta/manta.go +++ b/physical/manta/manta.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package manta import ( diff --git a/physical/manta/manta_test.go b/physical/manta/manta_test.go index 67d50fe71b14c0..8db52c53ab0e06 100644 --- a/physical/manta/manta_test.go +++ b/physical/manta/manta_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package manta import ( diff --git a/physical/mssql/mssql.go b/physical/mssql/mssql.go index 2859a65ef3243e..045b406387319b 100644 --- a/physical/mssql/mssql.go +++ b/physical/mssql/mssql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mssql import ( diff --git a/physical/mssql/mssql_test.go b/physical/mssql/mssql_test.go index 2324ff5c03f28e..e026ff6fa2fd54 100644 --- a/physical/mssql/mssql_test.go +++ b/physical/mssql/mssql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mssql import ( diff --git a/physical/mysql/mysql.go b/physical/mysql/mysql.go index 225882f7575b17..29bb3928ab8147 100644 --- a/physical/mysql/mysql.go +++ b/physical/mysql/mysql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysql import ( diff --git a/physical/mysql/mysql_test.go b/physical/mysql/mysql_test.go index b13c7e4a57c4e6..86373e91629e21 100644 --- a/physical/mysql/mysql_test.go +++ b/physical/mysql/mysql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysql import ( diff --git a/physical/postgresql/postgresql.go b/physical/postgresql/postgresql.go index a70133066750fe..ed4c883440cab9 100644 --- a/physical/postgresql/postgresql.go +++ b/physical/postgresql/postgresql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package postgresql import ( diff --git a/physical/postgresql/postgresql_test.go b/physical/postgresql/postgresql_test.go index 5dec40aba5d9d0..15d1ab35076d90 100644 --- a/physical/postgresql/postgresql_test.go +++ b/physical/postgresql/postgresql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package postgresql import ( diff --git a/physical/raft/bolt_32bit_test.go b/physical/raft/bolt_32bit_test.go index 7694d82f5cabed..ccb1641ea299b4 100644 --- a/physical/raft/bolt_32bit_test.go +++ b/physical/raft/bolt_32bit_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build 386 || arm package raft diff --git a/physical/raft/bolt_64bit_test.go b/physical/raft/bolt_64bit_test.go index c4b89b8cdc1458..d88c01eed59475 100644 --- a/physical/raft/bolt_64bit_test.go +++ b/physical/raft/bolt_64bit_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !386 && !arm package raft diff --git a/physical/raft/bolt_linux.go b/physical/raft/bolt_linux.go index b7774c61eaa892..4ea13e2a398608 100644 --- a/physical/raft/bolt_linux.go +++ b/physical/raft/bolt_linux.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/chunking_test.go b/physical/raft/chunking_test.go index 64f83e6b8daa03..a3f333ef424000 100644 --- a/physical/raft/chunking_test.go +++ b/physical/raft/chunking_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/fsm.go b/physical/raft/fsm.go index a8882812665ecf..c2d9953ce3f43c 100644 --- a/physical/raft/fsm.go +++ b/physical/raft/fsm.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/fsm_test.go b/physical/raft/fsm_test.go index ba0e382f097754..e80a6ce5573ffc 100644 --- a/physical/raft/fsm_test.go +++ b/physical/raft/fsm_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/msgpack.go b/physical/raft/msgpack.go index 88ac74d5949847..299dd8e0a98dfc 100644 --- a/physical/raft/msgpack.go +++ b/physical/raft/msgpack.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft // If we downgrade msgpack from v1.1.5 to v0.5.5, everything will still diff --git a/physical/raft/raft.go b/physical/raft/raft.go index 1060eda7aa6e18..4481053c6c52d0 100644 --- a/physical/raft/raft.go +++ b/physical/raft/raft.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( @@ -1367,7 +1364,7 @@ func (b *RaftBackend) Peers(ctx context.Context) ([]Peer, error) { // SnapshotHTTP is a wrapper for Snapshot that sends the snapshot as an HTTP // response. -func (b *RaftBackend) SnapshotHTTP(out *logical.HTTPResponseWriter, access seal.Access) error { +func (b *RaftBackend) SnapshotHTTP(out *logical.HTTPResponseWriter, access *seal.Access) error { out.Header().Add("Content-Disposition", "attachment") out.Header().Add("Content-Type", "application/gzip") @@ -1377,7 +1374,7 @@ func (b *RaftBackend) SnapshotHTTP(out *logical.HTTPResponseWriter, access seal. // Snapshot takes a raft snapshot, packages it into a archive file and writes it // to the provided writer. Seal access is used to encrypt the SHASUM file so we // can validate the snapshot was taken using the same root keys or not. 
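// Editorial note (annotation, not part of the patch hunk below): the next few
// hunks in physical/raft/raft.go change the snapshot helpers to take a pointer,
// i.e. `access *seal.Access` instead of `access seal.Access`, and the sealer
// struct field further down is updated to match. On this branch a caller would
// therefore pass a *seal.Access, or nil to skip the SHASUM validation on
// restore, as the WriteSnapshotToTemp doc comment above already describes.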
-func (b *RaftBackend) Snapshot(out io.Writer, access seal.Access) error { +func (b *RaftBackend) Snapshot(out io.Writer, access *seal.Access) error { b.l.RLock() defer b.l.RUnlock() @@ -1401,7 +1398,7 @@ func (b *RaftBackend) Snapshot(out io.Writer, access seal.Access) error { // access is used to decrypt the SHASUM file in the archive to ensure this // snapshot has the same root key as the running instance. If the provided // access is nil then it will skip that validation. -func (b *RaftBackend) WriteSnapshotToTemp(in io.ReadCloser, access seal.Access) (*os.File, func(), raft.SnapshotMeta, error) { +func (b *RaftBackend) WriteSnapshotToTemp(in io.ReadCloser, access *seal.Access) (*os.File, func(), raft.SnapshotMeta, error) { b.l.RLock() defer b.l.RUnlock() @@ -1894,7 +1891,7 @@ func (l *RaftLock) Value() (bool, string, error) { // sealer implements the snapshot.Sealer interface and is used in the snapshot // process for encrypting/decrypting the SHASUM file in snapshot archives. type sealer struct { - access seal.Access + access *seal.Access } // Seal encrypts the data with using the seal access object. diff --git a/physical/raft/raft_autopilot.go b/physical/raft/raft_autopilot.go index ca0ee759e93acf..ece180509c9e4b 100644 --- a/physical/raft/raft_autopilot.go +++ b/physical/raft/raft_autopilot.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/raft_test.go b/physical/raft/raft_test.go index 73d0ce32c54345..15f80f33e31883 100644 --- a/physical/raft/raft_test.go +++ b/physical/raft/raft_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/raft_util.go b/physical/raft/raft_util.go index bd496dfac64bd0..34570fba678f52 100644 --- a/physical/raft/raft_util.go +++ b/physical/raft/raft_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package raft diff --git a/physical/raft/snapshot.go b/physical/raft/snapshot.go index 68d9c953f8199a..cebcdb0a4a82fd 100644 --- a/physical/raft/snapshot.go +++ b/physical/raft/snapshot.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/snapshot_test.go b/physical/raft/snapshot_test.go index 3472c8d53981e1..0f8ba8fd6e8e63 100644 --- a/physical/raft/snapshot_test.go +++ b/physical/raft/snapshot_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/streamlayer.go b/physical/raft/streamlayer.go index 90d8e495cbaf2f..ed154f8bcdaf47 100644 --- a/physical/raft/streamlayer.go +++ b/physical/raft/streamlayer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/streamlayer_test.go b/physical/raft/streamlayer_test.go index d826eaadca759a..51a26f832266a0 100644 --- a/physical/raft/streamlayer_test.go +++ b/physical/raft/streamlayer_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/testing.go b/physical/raft/testing.go index ea6847911f2bb3..6f6f2b1e70146a 100644 --- a/physical/raft/testing.go +++ b/physical/raft/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package raft import ( diff --git a/physical/raft/types.pb.go b/physical/raft/types.pb.go index 2835e1f17d9687..35e610a47d2517 100644 --- a/physical/raft/types.pb.go +++ b/physical/raft/types.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/physical/raft/types.proto b/physical/raft/types.proto index bb3d136e10ebf9..0b1d189ef6e5d1 100644 --- a/physical/raft/types.proto +++ b/physical/raft/types.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/physical/raft"; diff --git a/physical/raft/vars_32bit.go b/physical/raft/vars_32bit.go index 6e5c51fe935293..c9662e796c5637 100644 --- a/physical/raft/vars_32bit.go +++ b/physical/raft/vars_32bit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build 386 || arm || windows package raft diff --git a/physical/raft/vars_64bit.go b/physical/raft/vars_64bit.go index a1eea0febc08df..40efb4c08910a7 100644 --- a/physical/raft/vars_64bit.go +++ b/physical/raft/vars_64bit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !386 && !arm && !windows package raft diff --git a/physical/s3/s3.go b/physical/s3/s3.go index 0cb8e0af3552ef..e9e1fd33789a99 100644 --- a/physical/s3/s3.go +++ b/physical/s3/s3.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package s3 import ( diff --git a/physical/s3/s3_test.go b/physical/s3/s3_test.go index 139e41dfad98fd..ee2c5c547ba95a 100644 --- a/physical/s3/s3_test.go +++ b/physical/s3/s3_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package s3 import ( diff --git a/physical/spanner/spanner.go b/physical/spanner/spanner.go index b84e0d4637b4c1..723b788199f7a4 100644 --- a/physical/spanner/spanner.go +++ b/physical/spanner/spanner.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package spanner import ( diff --git a/physical/spanner/spanner_ha.go b/physical/spanner/spanner_ha.go index d116be0ba34e40..7aa4f8986dbd8b 100644 --- a/physical/spanner/spanner_ha.go +++ b/physical/spanner/spanner_ha.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package spanner import ( diff --git a/physical/spanner/spanner_ha_test.go b/physical/spanner/spanner_ha_test.go index dad39ad4c95b45..49a818b393cc51 100644 --- a/physical/spanner/spanner_ha_test.go +++ b/physical/spanner/spanner_ha_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package spanner import ( diff --git a/physical/spanner/spanner_test.go b/physical/spanner/spanner_test.go index 4b7c1c46b11492..d484dd316cb58f 100644 --- a/physical/spanner/spanner_test.go +++ b/physical/spanner/spanner_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package spanner import ( diff --git a/physical/swift/swift.go b/physical/swift/swift.go index d616bfe35b3202..2155d44c8aba3c 100644 --- a/physical/swift/swift.go +++ b/physical/swift/swift.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package swift import ( diff --git a/physical/swift/swift_test.go b/physical/swift/swift_test.go index 8f8af160fefd2d..0e569c5a9fae9f 100644 --- a/physical/swift/swift_test.go +++ b/physical/swift/swift_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package swift import ( diff --git a/physical/zookeeper/zookeeper.go b/physical/zookeeper/zookeeper.go index e52ac9b6303386..28204ad2bd6671 100644 --- a/physical/zookeeper/zookeeper.go +++ b/physical/zookeeper/zookeeper.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package zookeeper import ( diff --git a/physical/zookeeper/zookeeper_test.go b/physical/zookeeper/zookeeper_test.go index e4448bf73ab7c9..c004a4e948826b 100644 --- a/physical/zookeeper/zookeeper_test.go +++ b/physical/zookeeper/zookeeper_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package zookeeper import ( diff --git a/plugins/database/cassandra/cassandra-database-plugin/main.go b/plugins/database/cassandra/cassandra-database-plugin/main.go index 8a91d1b50c7f32..4ee0903642e083 100644 --- a/plugins/database/cassandra/cassandra-database-plugin/main.go +++ b/plugins/database/cassandra/cassandra-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/cassandra/cassandra.go b/plugins/database/cassandra/cassandra.go index 8118fa06171bd9..de549261fd60fc 100644 --- a/plugins/database/cassandra/cassandra.go +++ b/plugins/database/cassandra/cassandra.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/plugins/database/cassandra/cassandra_test.go b/plugins/database/cassandra/cassandra_test.go index 7a3260935b7c4e..ec8b42290d30b3 100644 --- a/plugins/database/cassandra/cassandra_test.go +++ b/plugins/database/cassandra/cassandra_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/plugins/database/cassandra/connection_producer.go b/plugins/database/cassandra/connection_producer.go index a63ed27d5eb3fb..72f7bb878e162b 100644 --- a/plugins/database/cassandra/connection_producer.go +++ b/plugins/database/cassandra/connection_producer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/plugins/database/cassandra/connection_producer_test.go b/plugins/database/cassandra/connection_producer_test.go index e2f4ba0fc59f6d..3f99c1d65a0092 100644 --- a/plugins/database/cassandra/connection_producer_test.go +++ b/plugins/database/cassandra/connection_producer_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/plugins/database/cassandra/test-fixtures/no_tls/cassandra.yaml b/plugins/database/cassandra/test-fixtures/no_tls/cassandra.yaml index 481996968866bd..71fdead51f2385 100644 --- a/plugins/database/cassandra/test-fixtures/no_tls/cassandra.yaml +++ b/plugins/database/cassandra/test-fixtures/no_tls/cassandra.yaml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - # Cassandra storage config YAML # NOTE: diff --git a/plugins/database/cassandra/tls.go b/plugins/database/cassandra/tls.go index 17e148d7496d5b..cc64d3c3b5f485 100644 --- a/plugins/database/cassandra/tls.go +++ b/plugins/database/cassandra/tls.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cassandra import ( diff --git a/plugins/database/hana/hana-database-plugin/main.go b/plugins/database/hana/hana-database-plugin/main.go index 9ec568b66db40b..2057c36c08d401 100644 --- a/plugins/database/hana/hana-database-plugin/main.go +++ b/plugins/database/hana/hana-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/hana/hana.go b/plugins/database/hana/hana.go index 987cc1af2f812b..bca437c369a69d 100644 --- a/plugins/database/hana/hana.go +++ b/plugins/database/hana/hana.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package hana import ( diff --git a/plugins/database/hana/hana_test.go b/plugins/database/hana/hana_test.go index 6a3c1dbe07d4b9..67c1088834897b 100644 --- a/plugins/database/hana/hana_test.go +++ b/plugins/database/hana/hana_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package hana import ( diff --git a/plugins/database/influxdb/connection_producer.go b/plugins/database/influxdb/connection_producer.go index b9f18c5433863e..a9a6964ea2101d 100644 --- a/plugins/database/influxdb/connection_producer.go +++ b/plugins/database/influxdb/connection_producer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package influxdb import ( diff --git a/plugins/database/influxdb/influxdb-database-plugin/main.go b/plugins/database/influxdb/influxdb-database-plugin/main.go index bfc94f75fd31f2..c8f6c5fa1e3fdd 100644 --- a/plugins/database/influxdb/influxdb-database-plugin/main.go +++ b/plugins/database/influxdb/influxdb-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/influxdb/influxdb.go b/plugins/database/influxdb/influxdb.go index f216319a6c13c0..4a8225e52b3be2 100644 --- a/plugins/database/influxdb/influxdb.go +++ b/plugins/database/influxdb/influxdb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package influxdb import ( diff --git a/plugins/database/influxdb/influxdb_test.go b/plugins/database/influxdb/influxdb_test.go index 1fc858bfc5c85a..4ecdac51bcbd4b 100644 --- a/plugins/database/influxdb/influxdb_test.go +++ b/plugins/database/influxdb/influxdb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package influxdb import ( @@ -9,15 +6,14 @@ import ( "net/url" "os" "reflect" - "runtime" "strconv" "strings" "testing" "time" - "github.com/hashicorp/vault/sdk/database/dbplugin/v5" + "github.com/hashicorp/vault/helper/testhelpers/docker" + dbplugin "github.com/hashicorp/vault/sdk/database/dbplugin/v5" dbtesting "github.com/hashicorp/vault/sdk/database/dbplugin/v5/testing" - "github.com/hashicorp/vault/sdk/helper/docker" influx "github.com/influxdata/influxdb1-client/v2" "github.com/stretchr/testify/require" ) @@ -52,11 +48,6 @@ func (c *Config) connectionParams() map[string]interface{} { } func prepareInfluxdbTestContainer(t *testing.T) (func(), *Config) { - // Skipping on ARM, as this image can't run on ARM architecture - if strings.Contains(runtime.GOARCH, "arm") { - t.Skip("Skipping, as this image is not supported on ARM architectures") - } - c := &Config{ Username: "influx-root", Password: "influx-root", @@ -67,9 +58,8 @@ func prepareInfluxdbTestContainer(t *testing.T) (func(), *Config) { } runner, err := docker.NewServiceRunner(docker.RunOptions{ - ImageRepo: "docker.mirror.hashicorp.services/influxdb", - ContainerName: "influxdb", - ImageTag: "1.8-alpine", + ImageRepo: "influxdb", + ImageTag: "1.8-alpine", Env: []string{ "INFLUXDB_DB=vault", "INFLUXDB_ADMIN_USER=" + c.Username, diff --git a/plugins/database/mongodb/cert_helpers_test.go b/plugins/database/mongodb/cert_helpers_test.go index 9f9388b1cf2b47..deb04ab9c4e421 100644 --- a/plugins/database/mongodb/cert_helpers_test.go +++ b/plugins/database/mongodb/cert_helpers_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mongodb import ( diff --git a/plugins/database/mongodb/connection_producer.go b/plugins/database/mongodb/connection_producer.go index 4686c3b13f1198..348fb6bd4d435c 100644 --- a/plugins/database/mongodb/connection_producer.go +++ b/plugins/database/mongodb/connection_producer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mongodb import ( diff --git a/plugins/database/mongodb/connection_producer_test.go b/plugins/database/mongodb/connection_producer_test.go index 2ce3872c597f30..529e4d22fb09ff 100644 --- a/plugins/database/mongodb/connection_producer_test.go +++ b/plugins/database/mongodb/connection_producer_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mongodb import ( diff --git a/plugins/database/mongodb/mongodb-database-plugin/main.go b/plugins/database/mongodb/mongodb-database-plugin/main.go index fe68659eca6332..30dd5fdd7cff78 100644 --- a/plugins/database/mongodb/mongodb-database-plugin/main.go +++ b/plugins/database/mongodb/mongodb-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/mongodb/mongodb.go b/plugins/database/mongodb/mongodb.go index 4026fbc693da4e..6cb511b89f8e4a 100644 --- a/plugins/database/mongodb/mongodb.go +++ b/plugins/database/mongodb/mongodb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mongodb import ( diff --git a/plugins/database/mongodb/mongodb_test.go b/plugins/database/mongodb/mongodb_test.go index 4f36e36179ec46..dcfda3bc0576e4 100644 --- a/plugins/database/mongodb/mongodb_test.go +++ b/plugins/database/mongodb/mongodb_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package mongodb import ( @@ -8,7 +5,6 @@ import ( "crypto/tls" "crypto/x509" "fmt" - "net/http" "reflect" "strings" "sync" @@ -386,8 +382,6 @@ func appendToCertPool(t *testing.T, pool *x509.CertPool, caPem []byte) *x509.Cer } var cmpClientOptionsOpts = cmp.Options{ - cmpopts.IgnoreTypes(http.Transport{}), - cmp.AllowUnexported(options.ClientOptions{}), cmp.AllowUnexported(tls.Config{}), diff --git a/plugins/database/mongodb/util.go b/plugins/database/mongodb/util.go index be5842136bb633..a12828f503b8c1 100644 --- a/plugins/database/mongodb/util.go +++ b/plugins/database/mongodb/util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mongodb import "go.mongodb.org/mongo-driver/mongo/writeconcern" diff --git a/plugins/database/mssql/mssql-database-plugin/main.go b/plugins/database/mssql/mssql-database-plugin/main.go index 2a57b5746ec610..37a81a660012fc 100644 --- a/plugins/database/mssql/mssql-database-plugin/main.go +++ b/plugins/database/mssql/mssql-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/mssql/mssql.go b/plugins/database/mssql/mssql.go index 7c7a4c27b3b7ce..7915732b4d00a6 100644 --- a/plugins/database/mssql/mssql.go +++ b/plugins/database/mssql/mssql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mssql import ( diff --git a/plugins/database/mssql/mssql_test.go b/plugins/database/mssql/mssql_test.go index 385c5f0b69ff04..2292490d88a7ab 100644 --- a/plugins/database/mssql/mssql_test.go +++ b/plugins/database/mssql/mssql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mssql import ( diff --git a/plugins/database/mysql/connection_producer.go b/plugins/database/mysql/connection_producer.go index 5c59792131660e..208debe519de5e 100644 --- a/plugins/database/mysql/connection_producer.go +++ b/plugins/database/mysql/connection_producer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysql import ( diff --git a/plugins/database/mysql/connection_producer_test.go b/plugins/database/mysql/connection_producer_test.go index a3f0bc7ef56e0d..eacf18fabe8c81 100644 --- a/plugins/database/mysql/connection_producer_test.go +++ b/plugins/database/mysql/connection_producer_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysql import ( diff --git a/plugins/database/mysql/mysql-database-plugin/main.go b/plugins/database/mysql/mysql-database-plugin/main.go index 56640b2f7b13f4..6b1505aff194c0 100644 --- a/plugins/database/mysql/mysql-database-plugin/main.go +++ b/plugins/database/mysql/mysql-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/mysql/mysql-legacy-database-plugin/main.go b/plugins/database/mysql/mysql-legacy-database-plugin/main.go index 8aeba0b36bdc42..ea6b9839a77e3b 100644 --- a/plugins/database/mysql/mysql-legacy-database-plugin/main.go +++ b/plugins/database/mysql/mysql-legacy-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/mysql/mysql.go b/plugins/database/mysql/mysql.go index 0260ec20d21643..db47c71dd3108b 100644 --- a/plugins/database/mysql/mysql.go +++ b/plugins/database/mysql/mysql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysql import ( diff --git a/plugins/database/mysql/mysql_test.go b/plugins/database/mysql/mysql_test.go index 07e0165ffe4901..3c7eab5af35769 100644 --- a/plugins/database/mysql/mysql_test.go +++ b/plugins/database/mysql/mysql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mysql import ( diff --git a/plugins/database/postgresql/passwordauthentication.go b/plugins/database/postgresql/passwordauthentication.go deleted file mode 100644 index ec94bafba1180b..00000000000000 --- a/plugins/database/postgresql/passwordauthentication.go +++ /dev/null @@ -1,25 +0,0 @@ -package postgresql - -import "fmt" - -// passwordAuthentication determines whether to send passwords in plaintext (password) or hashed (scram-sha-256). -type passwordAuthentication string - -var ( - // passwordAuthenticationPassword is the default. If set, passwords will be sent to PostgreSQL in plain text. - passwordAuthenticationPassword passwordAuthentication = "password" - passwordAuthenticationSCRAMSHA256 passwordAuthentication = "scram-sha-256" -) - -var passwordAuthentications = map[passwordAuthentication]struct{}{ - passwordAuthenticationSCRAMSHA256: {}, - passwordAuthenticationPassword: {}, -} - -func parsePasswordAuthentication(s string) (passwordAuthentication, error) { - if _, ok := passwordAuthentications[passwordAuthentication(s)]; !ok { - return "", fmt.Errorf("'%s' is not a valid password authentication type", s) - } - - return passwordAuthentication(s), nil -} diff --git a/plugins/database/postgresql/postgresql-database-plugin/main.go b/plugins/database/postgresql/postgresql-database-plugin/main.go index f543167d4a3bef..75b5fd9babb2ac 100644 --- a/plugins/database/postgresql/postgresql-database-plugin/main.go +++ b/plugins/database/postgresql/postgresql-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/postgresql/postgresql.go b/plugins/database/postgresql/postgresql.go index 66c44cc34a08c4..6a350212c5f5da 100644 --- a/plugins/database/postgresql/postgresql.go +++ b/plugins/database/postgresql/postgresql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package postgresql import ( @@ -12,7 +9,6 @@ import ( "github.com/hashicorp/go-multierror" "github.com/hashicorp/go-secure-stdlib/strutil" - "github.com/hashicorp/vault/plugins/database/postgresql/scram" "github.com/hashicorp/vault/sdk/database/dbplugin/v5" "github.com/hashicorp/vault/sdk/database/helper/connutil" "github.com/hashicorp/vault/sdk/database/helper/dbutil" @@ -69,8 +65,7 @@ func new() *PostgreSQL { connProducer.Type = postgreSQLTypeName db := &PostgreSQL{ - SQLConnectionProducer: connProducer, - passwordAuthentication: passwordAuthenticationPassword, + SQLConnectionProducer: connProducer, } return db @@ -79,8 +74,7 @@ func new() *PostgreSQL { type PostgreSQL struct { *connutil.SQLConnectionProducer - usernameProducer template.StringTemplate - passwordAuthentication passwordAuthentication + usernameProducer template.StringTemplate } func (p *PostgreSQL) Initialize(ctx context.Context, req dbplugin.InitializeRequest) (dbplugin.InitializeResponse, error) { @@ -108,20 +102,6 @@ func (p *PostgreSQL) Initialize(ctx context.Context, req dbplugin.InitializeRequ return dbplugin.InitializeResponse{}, fmt.Errorf("invalid username template: %w", err) } - passwordAuthenticationRaw, err := strutil.GetString(req.Config, "password_authentication") - if err != nil { - return dbplugin.InitializeResponse{}, fmt.Errorf("failed to retrieve password_authentication: %w", err) - } - - if passwordAuthenticationRaw != "" { - pwAuthentication, err := parsePasswordAuthentication(passwordAuthenticationRaw) - if err != nil { - return dbplugin.InitializeResponse{}, err - } - - p.passwordAuthentication = pwAuthentication - } - resp := dbplugin.InitializeResponse{ Config: newConf, } @@ -205,15 +185,6 @@ func (p *PostgreSQL) changeUserPassword(ctx context.Context, username string, ch "username": username, "password": password, } - - if p.passwordAuthentication == passwordAuthenticationSCRAMSHA256 { - hashedPassword, err := scram.Hash(password) - if err != nil { - return fmt.Errorf("unable to scram-sha256 password: %w", err) - } - m["password"] = hashedPassword - } - if err := dbtxn.ExecuteTxQueryDirect(ctx, tx, m, query); err != nil { return fmt.Errorf("failed to execute query: %w", err) } @@ -298,24 +269,15 @@ func (p *PostgreSQL) NewUser(ctx context.Context, req dbplugin.NewUserRequest) ( } defer tx.Rollback() - m := map[string]string{ - "name": username, - "username": username, - "password": req.Password, - "expiration": expirationStr, - } - - if p.passwordAuthentication == passwordAuthenticationSCRAMSHA256 { - hashedPassword, err := scram.Hash(req.Password) - if err != nil { - return dbplugin.NewUserResponse{}, fmt.Errorf("unable to scram-sha256 password: %w", err) - } - m["password"] = hashedPassword - } - for _, stmt := range req.Statements.Commands { if containsMultilineStatement(stmt) { // Execute it as-is. 
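// Editorial note (annotation, not part of the diff): with password_authentication
// support removed, NewUser no longer hoists a shared credential map or hashes the
// password. The added lines below rebuild the template map inside the loop for
// both the multiline-statement path and the per-query path, always passing the
// plaintext req.Password to dbtxn.ExecuteTxQueryDirect.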
+ m := map[string]string{ + "name": username, + "username": username, + "password": req.Password, + "expiration": expirationStr, + } if err := dbtxn.ExecuteTxQueryDirect(ctx, tx, m, stmt); err != nil { return dbplugin.NewUserResponse{}, fmt.Errorf("failed to execute query: %w", err) } @@ -328,6 +290,12 @@ func (p *PostgreSQL) NewUser(ctx context.Context, req dbplugin.NewUserRequest) ( continue } + m := map[string]string{ + "name": username, + "username": username, + "password": req.Password, + "expiration": expirationStr, + } if err := dbtxn.ExecuteTxQueryDirect(ctx, tx, m, query); err != nil { return dbplugin.NewUserResponse{}, fmt.Errorf("failed to execute query: %w", err) } diff --git a/plugins/database/postgresql/postgresql_test.go b/plugins/database/postgresql/postgresql_test.go index 5e89ee912c18c6..8a9cbeb39ba9f1 100644 --- a/plugins/database/postgresql/postgresql_test.go +++ b/plugins/database/postgresql/postgresql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package postgresql import ( @@ -12,13 +9,12 @@ import ( "testing" "time" + "github.com/hashicorp/vault/helper/testhelpers/docker" "github.com/hashicorp/vault/helper/testhelpers/postgresql" "github.com/hashicorp/vault/sdk/database/dbplugin/v5" dbtesting "github.com/hashicorp/vault/sdk/database/dbplugin/v5/testing" "github.com/hashicorp/vault/sdk/database/helper/dbutil" - "github.com/hashicorp/vault/sdk/helper/docker" "github.com/hashicorp/vault/sdk/helper/template" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -94,97 +90,6 @@ func TestPostgreSQL_Initialize_ConnURLWithDSNFormat(t *testing.T) { } } -// TestPostgreSQL_PasswordAuthentication tests that the default "password_authentication" is "none", and that -// an error is returned if an invalid "password_authentication" is provided. -func TestPostgreSQL_PasswordAuthentication(t *testing.T) { - cleanup, connURL := postgresql.PrepareTestContainer(t, "13.4-buster") - defer cleanup() - - dsnConnURL, err := dbutil.ParseURL(connURL) - assert.NoError(t, err) - db := new() - - ctx := context.Background() - - t.Run("invalid-password-authentication", func(t *testing.T) { - connectionDetails := map[string]interface{}{ - "connection_url": dsnConnURL, - "password_authentication": "invalid-password-authentication", - } - - req := dbplugin.InitializeRequest{ - Config: connectionDetails, - VerifyConnection: true, - } - - _, err := db.Initialize(ctx, req) - assert.EqualError(t, err, "'invalid-password-authentication' is not a valid password authentication type") - }) - - t.Run("default-is-none", func(t *testing.T) { - connectionDetails := map[string]interface{}{ - "connection_url": dsnConnURL, - } - - req := dbplugin.InitializeRequest{ - Config: connectionDetails, - VerifyConnection: true, - } - - _ = dbtesting.AssertInitialize(t, db, req) - assert.Equal(t, passwordAuthenticationPassword, db.passwordAuthentication) - }) -} - -// TestPostgreSQL_PasswordAuthentication_SCRAMSHA256 tests that password_authentication works when set to scram-sha-256. -// When sending an encrypted password, the raw password should still successfully authenticate the user. 
-func TestPostgreSQL_PasswordAuthentication_SCRAMSHA256(t *testing.T) { - cleanup, connURL := postgresql.PrepareTestContainer(t, "13.4-buster") - defer cleanup() - - dsnConnURL, err := dbutil.ParseURL(connURL) - if err != nil { - t.Fatal(err) - } - - connectionDetails := map[string]interface{}{ - "connection_url": dsnConnURL, - "password_authentication": string(passwordAuthenticationSCRAMSHA256), - } - - req := dbplugin.InitializeRequest{ - Config: connectionDetails, - VerifyConnection: true, - } - - db := new() - resp := dbtesting.AssertInitialize(t, db, req) - assert.Equal(t, string(passwordAuthenticationSCRAMSHA256), resp.Config["password_authentication"]) - - if !db.Initialized { - t.Fatal("Database should be initialized") - } - - ctx := context.Background() - newUserRequest := dbplugin.NewUserRequest{ - Statements: dbplugin.Statements{ - Commands: []string{ - ` - CREATE ROLE "{{name}}" WITH - LOGIN - PASSWORD '{{password}}' - VALID UNTIL '{{expiration}}'; - GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO "{{name}}";`, - }, - }, - Password: "somesecurepassword", - Expiration: time.Now().Add(1 * time.Minute), - } - newUserResponse, err := db.NewUser(ctx, newUserRequest) - - assertCredsExist(t, db.ConnectionURL, newUserResponse.Username, newUserRequest.Password) -} - func TestPostgreSQL_NewUser(t *testing.T) { type testCase struct { req dbplugin.NewUserRequest diff --git a/plugins/database/postgresql/scram/LICENSE b/plugins/database/postgresql/scram/LICENSE deleted file mode 100644 index cc36995f299f48..00000000000000 --- a/plugins/database/postgresql/scram/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Taishi Kasuga - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
\ No newline at end of file diff --git a/plugins/database/postgresql/scram/scram.go b/plugins/database/postgresql/scram/scram.go deleted file mode 100644 index f5c6923cef662a..00000000000000 --- a/plugins/database/postgresql/scram/scram.go +++ /dev/null @@ -1,86 +0,0 @@ -package scram - -// -// @see https://github.com/postgres/postgres/blob/c30f54ad732ca5c8762bb68bbe0f51de9137dd72/src/interfaces/libpq/fe-auth.c#L1167-L1285 -// @see https://github.com/postgres/postgres/blob/e6bdfd9700ebfc7df811c97c2fc46d7e94e329a2/src/interfaces/libpq/fe-auth-scram.c#L868-L905 -// @see https://github.com/postgres/postgres/blob/c30f54ad732ca5c8762bb68bbe0f51de9137dd72/src/port/pg_strong_random.c#L66-L96 -// @see https://github.com/postgres/postgres/blob/e6bdfd9700ebfc7df811c97c2fc46d7e94e329a2/src/common/scram-common.c#L160-L274 -// @see https://github.com/postgres/postgres/blob/e6bdfd9700ebfc7df811c97c2fc46d7e94e329a2/src/common/scram-common.c#L27-L85 - -// Implementation from https://github.com/supercaracal/scram-sha-256/blob/d3c05cd927770a11c6e12de3e3a99c3446a1f78d/main.go -import ( - "crypto/hmac" - "crypto/rand" - "crypto/sha256" - "encoding/base64" - "fmt" - "io" - - "golang.org/x/crypto/pbkdf2" -) - -const ( - // @see https://github.com/postgres/postgres/blob/e6bdfd9700ebfc7df811c97c2fc46d7e94e329a2/src/include/common/scram-common.h#L36-L41 - saltSize = 16 - - // @see https://github.com/postgres/postgres/blob/c30f54ad732ca5c8762bb68bbe0f51de9137dd72/src/include/common/sha2.h#L22 - digestLen = 32 - - // @see https://github.com/postgres/postgres/blob/e6bdfd9700ebfc7df811c97c2fc46d7e94e329a2/src/include/common/scram-common.h#L43-L47 - iterationCnt = 4096 -) - -var ( - clientRawKey = []byte("Client Key") - serverRawKey = []byte("Server Key") -) - -func genSalt(size int) ([]byte, error) { - salt := make([]byte, size) - if _, err := io.ReadFull(rand.Reader, salt); err != nil { - return nil, err - } - return salt, nil -} - -func encodeB64(src []byte) (dst []byte) { - dst = make([]byte, base64.StdEncoding.EncodedLen(len(src))) - base64.StdEncoding.Encode(dst, src) - return -} - -func getHMACSum(key, msg []byte) []byte { - h := hmac.New(sha256.New, key) - _, _ = h.Write(msg) - return h.Sum(nil) -} - -func getSHA256Sum(key []byte) []byte { - h := sha256.New() - _, _ = h.Write(key) - return h.Sum(nil) -} - -func hashPassword(rawPassword, salt []byte, iter, keyLen int) string { - digestKey := pbkdf2.Key(rawPassword, salt, iter, keyLen, sha256.New) - clientKey := getHMACSum(digestKey, clientRawKey) - storedKey := getSHA256Sum(clientKey) - serverKey := getHMACSum(digestKey, serverRawKey) - - return fmt.Sprintf("SCRAM-SHA-256$%d:%s$%s:%s", - iter, - string(encodeB64(salt)), - string(encodeB64(storedKey)), - string(encodeB64(serverKey)), - ) -} - -func Hash(password string) (string, error) { - salt, err := genSalt(saltSize) - if err != nil { - return "", err - } - - hashedPassword := hashPassword([]byte(password), salt, iterationCnt, digestLen) - return hashedPassword, nil -} diff --git a/plugins/database/postgresql/scram/scram_test.go b/plugins/database/postgresql/scram/scram_test.go deleted file mode 100644 index d2933ebbca4047..00000000000000 --- a/plugins/database/postgresql/scram/scram_test.go +++ /dev/null @@ -1,27 +0,0 @@ -package scram - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -// TestScram tests the Hash method. The hashed password string should have a SCRAM-SHA-256 prefix. 
-func TestScram(t *testing.T) { - tcs := map[string]struct { - Password string - }{ - "empty-password": {Password: ""}, - "simple-password": {Password: "password"}, - } - - for name, tc := range tcs { - t.Run(name, func(t *testing.T) { - got, err := Hash(tc.Password) - assert.NoError(t, err) - assert.True(t, strings.HasPrefix(got, "SCRAM-SHA-256$4096:")) - assert.Len(t, got, 133) - }) - } -} diff --git a/plugins/database/redshift/redshift-database-plugin/main.go b/plugins/database/redshift/redshift-database-plugin/main.go index 7fcd9b0b648794..8d2f796eeab3fd 100644 --- a/plugins/database/redshift/redshift-database-plugin/main.go +++ b/plugins/database/redshift/redshift-database-plugin/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/plugins/database/redshift/redshift.go b/plugins/database/redshift/redshift.go index 11ce30a73f3386..ce39569d4858c5 100644 --- a/plugins/database/redshift/redshift.go +++ b/plugins/database/redshift/redshift.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package redshift import ( diff --git a/plugins/database/redshift/redshift_test.go b/plugins/database/redshift/redshift_test.go index af264587b6cd3c..24992183e4b2b3 100644 --- a/plugins/database/redshift/redshift_test.go +++ b/plugins/database/redshift/redshift_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package redshift import ( diff --git a/scan.hcl b/scan.hcl index 7553139d17f34a..2c3c6312119219 100644 --- a/scan.hcl +++ b/scan.hcl @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - repository { go_modules = true osv = true diff --git a/scripts/assetcheck.sh b/scripts/assetcheck.sh index d846dd5f9f4ca2..7100f84d9faef6 100755 --- a/scripts/assetcheck.sh +++ b/scripts/assetcheck.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - if [[ ! -e http/web_ui/index.html ]] then diff --git a/scripts/build.sh b/scripts/build.sh index cf990fa265c6b6..1856389cc6cffe 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # # This script builds the application from source for multiple platforms. set -e @@ -50,13 +47,11 @@ ${GO_CMD} build \ # Move all the compiled things to the $GOPATH/bin OLDIFS=$IFS -IFS=: FIRST=($GOPATH) BIN_PATH=${GOBIN:-${FIRST}/bin} +IFS=: MAIN_GOPATH=($GOPATH) IFS=$OLDIFS -# Ensure the go bin folder exists -mkdir -p ${BIN_PATH} -rm -f ${BIN_PATH}/vault -cp bin/vault ${BIN_PATH} +rm -f ${MAIN_GOPATH}/bin/vault +cp bin/vault ${MAIN_GOPATH}/bin/ # Done! echo diff --git a/scripts/ci-helper.sh b/scripts/ci-helper.sh index 4e33a8e8aa775b..227eccfc664e8e 100755 --- a/scripts/ci-helper.sh +++ b/scripts/ci-helper.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # The ci-helper is used to determine build metadata, build Vault binaries, # package those binaries into artifacts, and execute tests with those artifacts. 
@@ -132,9 +129,9 @@ function build_ui() { mkdir -p http/web_ui popd pushd "$repo_root/ui" - yarn install + yarn install --ignore-optional npm rebuild node-sass - yarn run build + yarn --verbose run build popd } diff --git a/scripts/coverage.sh b/scripts/coverage.sh index 7f5d49e534dea3..ad80496d157877 100755 --- a/scripts/coverage.sh +++ b/scripts/coverage.sh @@ -1,7 +1,4 @@ #!/bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Generate test coverage statistics for Go packages. # # Works around the fact that `go test -coverprofile` currently does not work diff --git a/scripts/cross/Dockerfile b/scripts/cross/Dockerfile index c0fddb3b05fdc0..504399c3ff386e 100644 --- a/scripts/cross/Dockerfile +++ b/scripts/cross/Dockerfile @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - FROM debian:buster RUN apt-get update -y && apt-get install --no-install-recommends -y -q \ @@ -15,7 +12,7 @@ RUN apt-get update -y && apt-get install --no-install-recommends -y -q \ libltdl-dev \ libltdl7 -RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - +RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - RUN curl -sL https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list diff --git a/scripts/deprecations-checker.sh b/scripts/deprecations-checker.sh deleted file mode 100755 index 017149e6d7095f..00000000000000 --- a/scripts/deprecations-checker.sh +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - -# This script is sourced into the shell running in a Github Actions workflow. - -# Usage: -# To check deprecations locally using the script, follow these steps: -# From the repository root or within a package folder, execute deprecations-checker.sh -# Optionally: to only show deprecations in changed files between the current branch and -# a specific branch, pass the other branch name as an argument to the script. -# -# For example: -# ./scripts/deprecations-checker.sh (or) make deprecations -# ./scripts/deprecations-checker.sh main (or) make ci-deprecations -# -# If no branch name is specified, the command will show all usage of deprecations in the code. -# -# GitHub Actions runs this against the PR's base ref branch. - -# Staticcheck uses static analysis to finds bugs and performance issues, offers simplifications, -# and enforces style rules. -# Here, it is used to check if a deprecated function, variable, constant or field is used. - -# Run staticcheck -echo "Performing deprecations check: running staticcheck" - -# Identify repository name -if [ -z $2 ]; then - # local repository name - repositoryName=$(basename `git rev-parse --show-toplevel`) -else - # github repository name from deprecated-functions-checker.yml - repositoryName=$2 -fi - -# Modify the command with the correct build tag based on repository -if [ $repositoryName == "vault-enterprise" ]; then - staticcheckCommand=$(echo "staticcheck ./... 
-tags=enterprise") -else - staticcheckCommand=$(echo "staticcheck ./...") -fi - -# If no compare branch name is specified, output all deprecations -# Else only output the deprecations from the changes added -if [ -z $1 ] - then - $staticcheckCommand | grep deprecated - else - # GitHub Actions will use this to find only changes wrt PR's base ref branch - # revgrep CLI tool will return an exit status of 1 if any issues match, else it will return 0 - $staticcheckCommand | grep deprecated 2>&1 | revgrep "$(git merge-base HEAD "origin/$1")" -fi diff --git a/scripts/deps_upgrade.py b/scripts/deps_upgrade.py index edd1b52f103987..9531696cee5654 100644 --- a/scripts/deps_upgrade.py +++ b/scripts/deps_upgrade.py @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - import os import sys diff --git a/scripts/dist.sh b/scripts/dist.sh index fc605d4fdd9943..e9891b059e1c99 100755 --- a/scripts/dist.sh +++ b/scripts/dist.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e # Get the version from the command line diff --git a/scripts/docker/Dockerfile b/scripts/docker/Dockerfile index ceb6ec6ee73dda..6208badf4bc89c 100644 --- a/scripts/docker/Dockerfile +++ b/scripts/docker/Dockerfile @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Multi-stage builder to avoid polluting users environment with wrong # architecture binaries. ARG VERSION diff --git a/scripts/docker/Dockerfile.ui b/scripts/docker/Dockerfile.ui index cac96929b43dd2..b13f0fe1fbd4ea 100644 --- a/scripts/docker/Dockerfile.ui +++ b/scripts/docker/Dockerfile.ui @@ -19,7 +19,7 @@ RUN apt-get update -y && apt-get install --no-install-recommends -y -q \ libltdl-dev \ libltdl7 -RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - +RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - RUN curl -sL https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list diff --git a/scripts/docker/docker-entrypoint.sh b/scripts/docker/docker-entrypoint.sh index 2b9b8f35a1606f..3b72da25b7f412 100755 --- a/scripts/docker/docker-entrypoint.sh +++ b/scripts/docker/docker-entrypoint.sh @@ -1,7 +1,4 @@ #!/usr/bin/dumb-init /bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e # Note above that we run dumb-init as PID 1 in order to reap zombie processes diff --git a/scripts/gen_openapi.sh b/scripts/gen_openapi.sh index de4589877d3629..b92fb883b9adb6 100755 --- a/scripts/gen_openapi.sh +++ b/scripts/gen_openapi.sh @@ -1,7 +1,4 @@ #!/bin/bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e @@ -24,67 +21,97 @@ then fi vault server -dev -dev-root-token-id=root & -sleep 5 +sleep 2 VAULT_PID=$! -defer_stop_vault() { - echo "Stopping Vault..." - kill $VAULT_PID - sleep 1 -} - -trap defer_stop_vault INT TERM EXIT - export VAULT_ADDR=http://127.0.0.1:8200 echo "Mounting all builtin plugins..." 
# Enable auth plugins -vault auth enable "alicloud" -vault auth enable "approle" -vault auth enable "aws" -vault auth enable "azure" -vault auth enable "centrify" -vault auth enable "cert" -vault auth enable "cf" -vault auth enable "gcp" -vault auth enable "github" -vault auth enable "jwt" -vault auth enable "kerberos" -vault auth enable "kubernetes" -vault auth enable "ldap" -vault auth enable "oci" -vault auth enable "okta" -vault auth enable "radius" -vault auth enable "userpass" +codeLinesStarted=false + +while read -r line; do + if [[ $line == *"credentialBackends:"* ]] ; then + codeLinesStarted=true + elif [[ $line == *"databasePlugins:"* ]] ; then + break + elif [ $codeLinesStarted = true ] && [[ $line == *"consts.Deprecated"* || $line == *"consts.PendingRemoval"* || $line == *"consts.Removed"* ]] ; then + auth_plugin_previous="" + elif [ $codeLinesStarted = true ] && [[ $line =~ ^\s*\"(.*)\"\:.*$ ]] ; then + auth_plugin_current=${BASH_REMATCH[1]} + + if [[ -n "${auth_plugin_previous}" ]] ; then + echo "enabling auth plugin: ${auth_plugin_previous}" + vault auth enable "${auth_plugin_previous}" + fi + + auth_plugin_previous="${auth_plugin_current}" + fi +done <../../vault/helper/builtinplugins/registry.go + +if [[ -n "${auth_plugin_previous}" ]] ; then + echo "enabling auth plugin: ${auth_plugin_previous}" + vault auth enable "${auth_plugin_previous}" +fi # Enable secrets plugins -vault secrets enable "alicloud" -vault secrets enable "aws" -vault secrets enable "azure" -vault secrets enable "consul" -vault secrets enable "database" -vault secrets enable "gcp" -vault secrets enable "gcpkms" -vault secrets enable "kubernetes" -vault secrets enable "kv" -vault secrets enable "ldap" -vault secrets enable "mongodbatlas" -vault secrets enable "nomad" -vault secrets enable "pki" -vault secrets enable "rabbitmq" -vault secrets enable "ssh" -vault secrets enable "terraform" -vault secrets enable "totp" -vault secrets enable "transit" +codeLinesStarted=false + +while read -r line; do + if [[ $line == *"logicalBackends:"* ]] ; then + codeLinesStarted=true + elif [[ $line == *"addExternalPlugins("* ]] ; then + break + elif [ $codeLinesStarted = true ] && [[ $line == *"consts.Deprecated"* || $line == *"consts.PendingRemoval"* || $line == *"consts.Removed"* ]] ; then + secrets_plugin_previous="" + elif [ $codeLinesStarted = true ] && [[ $line =~ ^\s*\"(.*)\"\:.*$ ]] ; then + secrets_plugin_current=${BASH_REMATCH[1]} + + if [[ -n "${secrets_plugin_previous}" ]] ; then + echo "enabling secrets plugin: ${secrets_plugin_previous}" + vault secrets enable "${secrets_plugin_previous}" + fi + + secrets_plugin_previous="${secrets_plugin_current}" + fi +done <../../vault/helper/builtinplugins/registry.go + +if [[ -n "${secrets_plugin_previous}" ]] ; then + echo "enabling secrets plugin: ${secrets_plugin_previous}" + vault secrets enable "${secrets_plugin_previous}" +fi # Enable enterprise features -if [[ -n "${VAULT_LICENSE:-}" ]]; then +entRegFile=../../vault/helper/builtinplugins/registry_util_ent.go +if [ -f $entRegFile ] && [[ -n "${VAULT_LICENSE}" ]]; then vault write sys/license text="${VAULT_LICENSE}" - vault secrets enable "keymgmt" - vault secrets enable "kmip" - vault secrets enable "transform" + codeLinesStarted=false + + while read -r line; do + if [[ $line == *"ExternalPluginsEnt:"* ]] ; then + codeLinesStarted=true + elif [[ $line == *"addExtPluginsEntImpl("* ]] ; then + break + elif [ $codeLinesStarted = true ] && [[ $line == *"consts.Deprecated"* || $line == *"consts.PendingRemoval"* || 
$line == *"consts.Removed"* ]] ; then + secrets_plugin_previous="" + elif [ $codeLinesStarted = true ] && [[ $line =~ ^\s*\"(.*)\"\:.*$ ]] ; then + ent_plugin_current=${BASH_REMATCH[1]} + + if [[ -n "${ent_plugin_previous}" ]] ; then + echo "enabling enterprise plugin: ${ent_plugin_previous}" + vault secrets enable "${ent_plugin_previous}" + fi + + ent_plugin_previous="${ent_plugin_current}" + fi + done <$entRegFile + + if [[ -n "${ent_plugin_previous}" ]] ; then + echo "enabling enterprise plugin: ${ent_plugin_previous}" + vault secrets enable "${ent_plugin_previous}" + fi fi # Output OpenAPI, optionally formatted @@ -98,6 +125,8 @@ else 'http://127.0.0.1:8200/v1/sys/internal/specs/openapi' > openapi.json fi +kill $VAULT_PID +sleep 1 + echo echo "openapi.json generated" -echo diff --git a/scripts/gofmtcheck.sh b/scripts/gofmtcheck.sh index 5c58f178558b94..574f4d7167c949 100755 --- a/scripts/gofmtcheck.sh +++ b/scripts/gofmtcheck.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - echo "==> Checking that code complies with gofmt requirements..." diff --git a/scripts/goversioncheck.sh b/scripts/goversioncheck.sh index 7ee7422581e5ef..6f55260099f2ed 100755 --- a/scripts/goversioncheck.sh +++ b/scripts/goversioncheck.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - GO_CMD=${GO_CMD:-go} diff --git a/scripts/protocversioncheck.sh b/scripts/protocversioncheck.sh index a2cbc6cc3f2796..4b081674806b9b 100755 --- a/scripts/protocversioncheck.sh +++ b/scripts/protocversioncheck.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -euo pipefail diff --git a/scripts/semgrep_plugin_repos.sh b/scripts/semgrep_plugin_repos.sh index 6dc7407320caa1..41f6dfd7a158aa 100755 --- a/scripts/semgrep_plugin_repos.sh +++ b/scripts/semgrep_plugin_repos.sh @@ -1,7 +1,4 @@ #!/bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e set -x diff --git a/scripts/testciphers.sh b/scripts/testciphers.sh index f9684f570bba9e..324d6bce7e02a9 100755 --- a/scripts/testciphers.sh +++ b/scripts/testciphers.sh @@ -1,7 +1,4 @@ #!/usr/bin/env bash -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # Adapted from https://superuser.com/a/224263 diff --git a/scripts/update_deps.sh b/scripts/update_deps.sh index f491b7e69bc404..35f0fecdf1b573 100755 --- a/scripts/update_deps.sh +++ b/scripts/update_deps.sh @@ -1,7 +1,4 @@ #!/bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/scripts/update_plugin_modules.sh b/scripts/update_plugin_modules.sh index 2a300f3bc84616..ae87fd8d6a5bf6 100755 --- a/scripts/update_plugin_modules.sh +++ b/scripts/update_plugin_modules.sh @@ -1,7 +1,4 @@ #!/bin/sh -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - set -e diff --git a/sdk/database/dbplugin/client.go b/sdk/database/dbplugin/client.go index 265b46b6108a53..c30c86d0c91094 100644 --- a/sdk/database/dbplugin/client.go +++ b/sdk/database/dbplugin/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/database.pb.go b/sdk/database/dbplugin/database.pb.go index 62964c7d15d624..36d347151c8801 100644 --- a/sdk/database/dbplugin/database.pb.go +++ b/sdk/database/dbplugin/database.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/database/dbplugin/database.proto b/sdk/database/dbplugin/database.proto index ed292527898985..d8c208099b3680 100644 --- a/sdk/database/dbplugin/database.proto +++ b/sdk/database/dbplugin/database.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/sdk/database/dbplugin"; diff --git a/sdk/database/dbplugin/databasemiddleware.go b/sdk/database/dbplugin/databasemiddleware.go index d7cabafefe29b4..29c806113844b7 100644 --- a/sdk/database/dbplugin/databasemiddleware.go +++ b/sdk/database/dbplugin/databasemiddleware.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/grpc_transport.go b/sdk/database/dbplugin/grpc_transport.go index 3740ef59c3b813..fbae626df39734 100644 --- a/sdk/database/dbplugin/grpc_transport.go +++ b/sdk/database/dbplugin/grpc_transport.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/plugin.go b/sdk/database/dbplugin/plugin.go index 0b01454123c82d..29f2f1f898b899 100644 --- a/sdk/database/dbplugin/plugin.go +++ b/sdk/database/dbplugin/plugin.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/server.go b/sdk/database/dbplugin/server.go index bf96a3bba4d915..4949384baf5653 100644 --- a/sdk/database/dbplugin/server.go +++ b/sdk/database/dbplugin/server.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/conversions_test.go b/sdk/database/dbplugin/v5/conversions_test.go index 5e65c3467068bd..6207f0f39f7f2a 100644 --- a/sdk/database/dbplugin/v5/conversions_test.go +++ b/sdk/database/dbplugin/v5/conversions_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( @@ -65,7 +62,6 @@ func TestConversionsHaveAllFields(t *testing.T) { CredentialType: CredentialTypeRSAPrivateKey, PublicKey: []byte("-----BEGIN PUBLIC KEY-----"), Password: "password", - Subject: "subject", Expiration: time.Now(), } diff --git a/sdk/database/dbplugin/v5/database.go b/sdk/database/dbplugin/v5/database.go index 065aaefd5a246c..b73bd6858dca74 100644 --- a/sdk/database/dbplugin/v5/database.go +++ b/sdk/database/dbplugin/v5/database.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( @@ -123,10 +120,6 @@ type NewUserRequest struct { // The value is set when the credential type is CredentialTypeRSAPrivateKey. PublicKey []byte - // Subject is the distinguished name for the client certificate credential. - // Value is set when the credential type is CredentialTypeClientCertificate. - Subject string - // Expiration of the user. Not all database plugins will support this. 
Expiration time.Time } @@ -150,7 +143,6 @@ type CredentialType int const ( CredentialTypePassword CredentialType = iota CredentialTypeRSAPrivateKey - CredentialTypeClientCertificate ) func (k CredentialType) String() string { @@ -159,8 +151,6 @@ func (k CredentialType) String() string { return "password" case CredentialTypeRSAPrivateKey: return "rsa_private_key" - case CredentialTypeClientCertificate: - return "client_certificate" default: return "unknown" } diff --git a/sdk/database/dbplugin/v5/grpc_client.go b/sdk/database/dbplugin/v5/grpc_client.go index 9b0b984f42af11..cfddfcd578efa0 100644 --- a/sdk/database/dbplugin/v5/grpc_client.go +++ b/sdk/database/dbplugin/v5/grpc_client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( @@ -104,10 +101,6 @@ func newUserReqToProto(req NewUserRequest) (*proto.NewUserRequest, error) { if len(req.PublicKey) == 0 { return nil, fmt.Errorf("missing public key credential") } - case CredentialTypeClientCertificate: - if req.Subject == "" { - return nil, fmt.Errorf("missing certificate subject") - } default: return nil, fmt.Errorf("unknown credential type") } @@ -125,7 +118,6 @@ func newUserReqToProto(req NewUserRequest) (*proto.NewUserRequest, error) { CredentialType: int32(req.CredentialType), Password: req.Password, PublicKey: req.PublicKey, - Subject: req.Subject, Expiration: expiration, Statements: &proto.Statements{ Commands: req.Statements.Commands, diff --git a/sdk/database/dbplugin/v5/grpc_client_test.go b/sdk/database/dbplugin/v5/grpc_client_test.go index 05ecb960e60a38..b187d736d80ed2 100644 --- a/sdk/database/dbplugin/v5/grpc_client_test.go +++ b/sdk/database/dbplugin/v5/grpc_client_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/grpc_database_plugin.go b/sdk/database/dbplugin/v5/grpc_database_plugin.go index b428d4ce06eff8..441030df93e0bf 100644 --- a/sdk/database/dbplugin/v5/grpc_database_plugin.go +++ b/sdk/database/dbplugin/v5/grpc_database_plugin.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/grpc_server.go b/sdk/database/dbplugin/v5/grpc_server.go index 7e1bc3fa1fc76a..ce3be1efb7c687 100644 --- a/sdk/database/dbplugin/v5/grpc_server.go +++ b/sdk/database/dbplugin/v5/grpc_server.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( @@ -152,7 +149,6 @@ func (g *gRPCServer) NewUser(ctx context.Context, req *proto.NewUserRequest) (*p CredentialType: CredentialType(req.GetCredentialType()), Password: req.GetPassword(), PublicKey: req.GetPublicKey(), - Subject: req.GetSubject(), Expiration: expiration, Statements: getStatementsFromProto(req.GetStatements()), RollbackStatements: getStatementsFromProto(req.GetRollbackStatements()), diff --git a/sdk/database/dbplugin/v5/grpc_server_test.go b/sdk/database/dbplugin/v5/grpc_server_test.go index 53d44c7c2a6564..7399bf55789bcc 100644 --- a/sdk/database/dbplugin/v5/grpc_server_test.go +++ b/sdk/database/dbplugin/v5/grpc_server_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/marshalling.go b/sdk/database/dbplugin/v5/marshalling.go index 2b3e8cb346acf2..e14a21e58335d8 100644 --- a/sdk/database/dbplugin/v5/marshalling.go +++ b/sdk/database/dbplugin/v5/marshalling.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/middleware.go b/sdk/database/dbplugin/v5/middleware.go index 2091e672084a6c..240d64e6915eae 100644 --- a/sdk/database/dbplugin/v5/middleware.go +++ b/sdk/database/dbplugin/v5/middleware.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/middleware_test.go b/sdk/database/dbplugin/v5/middleware_test.go index a2a76336fb7d08..5dd97cdb9e5a49 100644 --- a/sdk/database/dbplugin/v5/middleware_test.go +++ b/sdk/database/dbplugin/v5/middleware_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/plugin_client.go b/sdk/database/dbplugin/v5/plugin_client.go index b4085ead6cb9cb..caea00a8fdaf18 100644 --- a/sdk/database/dbplugin/v5/plugin_client.go +++ b/sdk/database/dbplugin/v5/plugin_client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/plugin_client_test.go b/sdk/database/dbplugin/v5/plugin_client_test.go index 10f02b7bec259a..903cec65dcbd41 100644 --- a/sdk/database/dbplugin/v5/plugin_client_test.go +++ b/sdk/database/dbplugin/v5/plugin_client_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/plugin_factory.go b/sdk/database/dbplugin/v5/plugin_factory.go index 4b158c319e2640..649f0f3fc9448e 100644 --- a/sdk/database/dbplugin/v5/plugin_factory.go +++ b/sdk/database/dbplugin/v5/plugin_factory.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/plugin_server.go b/sdk/database/dbplugin/v5/plugin_server.go index 216219df1d7fcd..090894ae552179 100644 --- a/sdk/database/dbplugin/v5/plugin_server.go +++ b/sdk/database/dbplugin/v5/plugin_server.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbplugin import ( diff --git a/sdk/database/dbplugin/v5/proto/database.pb.go b/sdk/database/dbplugin/v5/proto/database.pb.go index f152acaa79fd81..e50f1508da4412 100644 --- a/sdk/database/dbplugin/v5/proto/database.pb.go +++ b/sdk/database/dbplugin/v5/proto/database.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.28.1 @@ -142,7 +139,6 @@ type NewUserRequest struct { RollbackStatements *Statements `protobuf:"bytes,5,opt,name=rollback_statements,json=rollbackStatements,proto3" json:"rollback_statements,omitempty"` CredentialType int32 `protobuf:"varint,6,opt,name=credential_type,json=credentialType,proto3" json:"credential_type,omitempty"` PublicKey []byte `protobuf:"bytes,7,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty"` - Subject string `protobuf:"bytes,8,opt,name=subject,proto3" json:"subject,omitempty"` } func (x *NewUserRequest) Reset() { @@ -226,13 +222,6 @@ func (x *NewUserRequest) GetPublicKey() []byte { return nil } -func (x *NewUserRequest) GetSubject() string { - if x != nil { - return x.Subject - } - return "" -} - type UsernameConfig struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -877,7 +866,7 @@ var file_sdk_database_dbplugin_v5_proto_database_proto_rawDesc = []byte{ 0x0b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x0a, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x44, 0x61, 0x74, 0x61, 0x22, 0x93, 0x03, 0x0a, 0x0e, 0x4e, 0x65, 0x77, 0x55, + 0x66, 0x69, 0x67, 0x44, 0x61, 0x74, 0x61, 0x22, 0xf9, 0x02, 0x0a, 0x0e, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x44, 0x0a, 0x0f, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, @@ -901,102 +890,100 @@ var file_sdk_database_dbplugin_v5_proto_database_proto_rawDesc = []byte{ 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, - 0x4b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x08, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x22, 0x50, 0x0a, - 0x0e, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, - 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x6f, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, - 0x2d, 0x0a, 0x0f, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x8d, - 0x02, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x37, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 
0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, - 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52, - 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x3d, 0x0a, 0x0a, 0x65, 0x78, 0x70, - 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, - 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x43, 0x68, 0x61, 0x6e, - 0x67, 0x65, 0x45, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x65, 0x78, - 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, - 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x64, - 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, - 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, - 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x0f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, - 0x69, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, - 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x22, 0x6c, - 0x0a, 0x0e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, - 0x12, 0x21, 0x0a, 0x0c, 0x6e, 0x65, 0x77, 0x5f, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6e, 0x65, 0x77, 0x50, 0x61, 0x73, 0x73, 0x77, - 0x6f, 0x72, 0x64, 0x12, 0x37, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, - 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x70, 0x0a, 0x0f, - 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, - 0x24, 0x0a, 0x0e, 0x6e, 0x65, 0x77, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x6e, 0x65, 0x77, 0x50, 0x75, 0x62, 0x6c, - 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x37, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x64, 0x62, 0x70, 0x6c, - 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x8e, - 0x01, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x45, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x0e, 0x6e, 0x65, 0x77, 0x5f, 0x65, 0x78, 0x70, 0x69, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x6e, 0x65, 0x77, 0x45, 0x78, 0x70, 0x69, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x37, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x64, 0x62, 0x70, - 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, - 0x14, 0x0a, 0x12, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 
0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x68, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, + 0x4b, 0x65, 0x79, 0x22, 0x50, 0x0a, 0x0e, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6c, 0x65, + 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x6f, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x2d, 0x0a, 0x0f, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, + 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x8d, 0x02, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, - 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x64, 0x62, 0x70, - 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, - 0x14, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x22, 0x0a, 0x0c, 0x54, 0x79, 0x70, 0x65, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x54, 0x79, 0x70, 0x65, 0x22, 0x28, 0x0a, 0x0a, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x43, 0x6f, 0x6d, 0x6d, 0x61, - 0x6e, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x43, 0x6f, 0x6d, 0x6d, 0x61, - 0x6e, 0x64, 0x73, 0x22, 0x07, 0x0a, 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x32, 0xa5, 0x03, 0x0a, - 0x08, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x4d, 0x0a, 0x0a, 0x49, 0x6e, 0x69, - 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x12, 0x1e, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, - 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, - 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x44, 0x0a, 0x07, 0x4e, 0x65, 0x77, 0x55, - 0x73, 0x65, 0x72, 0x12, 0x1b, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, - 0x35, 0x2e, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1c, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x4e, - 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, - 0x0a, 0x0a, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x64, - 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x64, - 0x62, 0x70, 
0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, 0x0a, - 0x0a, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x64, 0x62, - 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, - 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x64, 0x62, - 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, - 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x35, 0x0a, 0x04, - 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, - 0x76, 0x35, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x05, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x12, 0x12, 0x2e, 0x64, - 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, - 0x1a, 0x12, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x76, 0x61, 0x75, - 0x6c, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x2f, - 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2f, 0x76, 0x35, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, + 0x72, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, + 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, 0x73, + 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, + 0x3d, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, + 0x35, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x45, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3b, + 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, + 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, + 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x0f, 0x63, + 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, + 0x54, 0x79, 0x70, 0x65, 0x22, 0x6c, 0x0a, 0x0e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x61, + 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x6e, 0x65, 0x77, 0x5f, 0x70, 0x61, + 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6e, 0x65, + 0x77, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x37, 0x0a, 0x0a, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 
0x32, 0x17, 0x2e, + 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x22, 0x70, 0x0a, 0x0f, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x75, 0x62, 0x6c, + 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x24, 0x0a, 0x0e, 0x6e, 0x65, 0x77, 0x5f, 0x70, 0x75, 0x62, + 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x6e, + 0x65, 0x77, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x37, 0x0a, 0x0a, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x22, 0x8e, 0x01, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x45, + 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x0e, 0x6e, 0x65, 0x77, + 0x5f, 0x65, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x6e, + 0x65, 0x77, 0x45, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x37, 0x0a, 0x0a, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x17, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x14, 0x0a, 0x12, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, + 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x68, 0x0a, 0x11, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x0a, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x17, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x14, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, + 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x22, 0x0a, 0x0c, 0x54, + 0x79, 0x70, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x54, + 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x54, 0x79, 0x70, 0x65, 0x22, + 0x28, 0x0a, 0x0a, 0x53, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1a, 0x0a, + 0x08, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x08, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x73, 0x22, 0x07, 0x0a, 0x05, 0x45, 0x6d, 0x70, + 0x74, 0x79, 0x32, 0xa5, 0x03, 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, + 0x4d, 0x0a, 0x0a, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x12, 0x1e, 0x2e, + 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x49, 0x6e, 0x69, 0x74, + 0x69, 
0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, + 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x49, 0x6e, 0x69, 0x74, + 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x44, + 0x0a, 0x07, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x12, 0x1b, 0x2e, 0x64, 0x62, 0x70, 0x6c, + 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, + 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x4e, 0x65, 0x77, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, 0x0a, 0x0a, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, + 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, + 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, + 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x4d, 0x0a, 0x0a, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, + 0x72, 0x12, 0x1e, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1f, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x35, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x2e, 0x64, 0x62, 0x70, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, + 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x54, 0x79, 0x70, + 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x05, 0x43, 0x6c, 0x6f, + 0x73, 0x65, 0x12, 0x12, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2e, 0x76, 0x35, + 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x12, 0x2e, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, + 0x6e, 0x2e, 0x76, 0x35, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, + 0x72, 0x70, 0x2f, 0x76, 0x61, 0x75, 0x6c, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x64, 0x61, 0x74, + 0x61, 0x62, 0x61, 0x73, 0x65, 0x2f, 0x64, 0x62, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2f, 0x76, + 0x35, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/database/dbplugin/v5/proto/database.proto b/sdk/database/dbplugin/v5/proto/database.proto index d6f877b6a549f5..b4959f709e1f9e 100644 --- a/sdk/database/dbplugin/v5/proto/database.proto +++ b/sdk/database/dbplugin/v5/proto/database.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; package dbplugin.v5; @@ -32,7 +29,6 @@ message NewUserRequest { Statements rollback_statements = 5; int32 credential_type = 6; bytes public_key = 7; - string subject = 8; } message UsernameConfig { diff --git a/sdk/database/dbplugin/v5/testing/test_helpers.go b/sdk/database/dbplugin/v5/testing/test_helpers.go index 83e4af3089ce3f..55a402c7fe3c88 100644 --- a/sdk/database/dbplugin/v5/testing/test_helpers.go +++ b/sdk/database/dbplugin/v5/testing/test_helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbtesting import ( diff --git a/sdk/database/helper/connutil/connutil.go b/sdk/database/helper/connutil/connutil.go index 50582aa8196a94..1749b275a26081 100644 --- a/sdk/database/helper/connutil/connutil.go +++ b/sdk/database/helper/connutil/connutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package connutil import ( diff --git a/sdk/database/helper/connutil/sql.go b/sdk/database/helper/connutil/sql.go index d1af4808cb44f6..6256ff1a4cf013 100644 --- a/sdk/database/helper/connutil/sql.go +++ b/sdk/database/helper/connutil/sql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package connutil import ( diff --git a/sdk/database/helper/connutil/sql_test.go b/sdk/database/helper/connutil/sql_test.go index 9f29d4ae2c5ab8..2ca11b75898604 100644 --- a/sdk/database/helper/connutil/sql_test.go +++ b/sdk/database/helper/connutil/sql_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package connutil import ( diff --git a/sdk/database/helper/credsutil/credsutil.go b/sdk/database/helper/credsutil/credsutil.go index 503999c868b3fa..064552d1fa9fb2 100644 --- a/sdk/database/helper/credsutil/credsutil.go +++ b/sdk/database/helper/credsutil/credsutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package credsutil import ( diff --git a/sdk/database/helper/credsutil/credsutil_test.go b/sdk/database/helper/credsutil/credsutil_test.go index 77e1a2862f3c22..e094719d079744 100644 --- a/sdk/database/helper/credsutil/credsutil_test.go +++ b/sdk/database/helper/credsutil/credsutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package credsutil import ( diff --git a/sdk/database/helper/credsutil/sql.go b/sdk/database/helper/credsutil/sql.go index 2c27adf37cc127..39fb467a79bf95 100644 --- a/sdk/database/helper/credsutil/sql.go +++ b/sdk/database/helper/credsutil/sql.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package credsutil import ( diff --git a/sdk/database/helper/credsutil/usernames.go b/sdk/database/helper/credsutil/usernames.go index 962208ac9a662c..c1e3ccb5298edd 100644 --- a/sdk/database/helper/credsutil/usernames.go +++ b/sdk/database/helper/credsutil/usernames.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package credsutil import ( diff --git a/sdk/database/helper/credsutil/usernames_test.go b/sdk/database/helper/credsutil/usernames_test.go index a3e883491fc28b..b1e79ce26d6ed5 100644 --- a/sdk/database/helper/credsutil/usernames_test.go +++ b/sdk/database/helper/credsutil/usernames_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package credsutil import ( diff --git a/sdk/database/helper/dbutil/dbutil.go b/sdk/database/helper/dbutil/dbutil.go index efc7e01e13f025..19198bcfdddd70 100644 --- a/sdk/database/helper/dbutil/dbutil.go +++ b/sdk/database/helper/dbutil/dbutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbutil import ( diff --git a/sdk/database/helper/dbutil/dbutil_test.go b/sdk/database/helper/dbutil/dbutil_test.go index 797712b4d902a8..64ca9924d390d4 100644 --- a/sdk/database/helper/dbutil/dbutil_test.go +++ b/sdk/database/helper/dbutil/dbutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbutil import ( diff --git a/sdk/framework/backend.go b/sdk/framework/backend.go index c0527addf999d5..67ad2a8edb3a29 100644 --- a/sdk/framework/backend.go +++ b/sdk/framework/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( @@ -755,8 +752,6 @@ func (t FieldType) Zero() interface{} { return "" case TypeInt: return 0 - case TypeInt64: - return int64(0) case TypeBool: return false case TypeMap: diff --git a/sdk/framework/backend_test.go b/sdk/framework/backend_test.go index 0b7a2054373d6c..9a2b5941457afa 100644 --- a/sdk/framework/backend_test.go +++ b/sdk/framework/backend_test.go @@ -1,11 +1,7 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( "context" - "fmt" "net/http" "reflect" "strings" @@ -815,23 +811,3 @@ func TestInitializeBackend(t *testing.T) { t.Fatal("backend should be open") } } - -// TestFieldTypeMethods tries to ensure our switch-case statements for the -// FieldType "enum" are complete. -func TestFieldTypeMethods(t *testing.T) { - unknownFormat := convertType(TypeInvalid).format - - for i := TypeInvalid + 1; i < typeInvalidMax; i++ { - t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { - if i.String() == TypeInvalid.String() { - t.Errorf("unknown type string for %d", i) - } - - if convertType(i).format == unknownFormat { - t.Errorf("unknown schema for %d", i) - } - - _ = i.Zero() - }) - } -} diff --git a/sdk/framework/field_data.go b/sdk/framework/field_data.go index e5f69acdb81ad1..d9e6fa365f1d1f 100644 --- a/sdk/framework/field_data.go +++ b/sdk/framework/field_data.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/field_data_test.go b/sdk/framework/field_data_test.go index 078c6fbcd5b8e9..b152e84fa0f2ad 100644 --- a/sdk/framework/field_data_test.go +++ b/sdk/framework/field_data_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/field_type.go b/sdk/framework/field_type.go index ee07b6afe86643..ef7f08191e1a3d 100644 --- a/sdk/framework/field_type.go +++ b/sdk/framework/field_type.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework // FieldType is the enum of types that a field can be. @@ -61,15 +58,11 @@ const ( // TypeFloat parses both float32 and float64 values TypeFloat - // TypeTime represents absolute time. It accepts an RFC3339-formatted + // TypeTime represents absolute time. It accepts an RFC3999-formatted // string (with or without fractional seconds), or an epoch timestamp // formatted as a string or a number. The resulting time.Time // is converted to UTC. 
TypeTime - - // DO NOT USE. Any new values must be inserted before this value. - // Used to write tests that ensure type methods handle all possible values. - typeInvalidMax ) func (t FieldType) String() string { @@ -82,8 +75,6 @@ func (t FieldType) String() string { return "name string" case TypeInt: return "int" - case TypeInt64: - return "int64" case TypeBool: return "bool" case TypeMap: diff --git a/sdk/framework/filter.go b/sdk/framework/filter.go index b9b99799b9169f..faaccba2a864c3 100644 --- a/sdk/framework/filter.go +++ b/sdk/framework/filter.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/identity.go b/sdk/framework/identity.go index 157f3c193cf42f..ebb2aa4dcc6c01 100644 --- a/sdk/framework/identity.go +++ b/sdk/framework/identity.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/identity_test.go b/sdk/framework/identity_test.go index 1667fb96063603..cb71eefdf9d895 100644 --- a/sdk/framework/identity_test.go +++ b/sdk/framework/identity_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/lease.go b/sdk/framework/lease.go index 24824ca52a24ba..4d0240fbe7fd83 100644 --- a/sdk/framework/lease.go +++ b/sdk/framework/lease.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/lease_test.go b/sdk/framework/lease_test.go index 5d1f9f091bbc5c..e145c2a82b78ba 100644 --- a/sdk/framework/lease_test.go +++ b/sdk/framework/lease_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/openapi.go b/sdk/framework/openapi.go index 663386a179d64b..f8df911029c9ff 100644 --- a/sdk/framework/openapi.go +++ b/sdk/framework/openapi.go @@ -244,7 +244,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st } } - for pathIndex, path := range paths { + for _, path := range paths { // Construct a top level PathItem which will be populated as the path is processed. pi := OASPathItem{ Description: cleanString(p.HelpSynopsis), @@ -252,7 +252,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st pi.Sudo = specialPathMatch(path, sudoPaths) pi.Unauthenticated = specialPathMatch(path, unauthPaths) - pi.DisplayAttrs = withoutOperationHints(p.DisplayAttrs) + pi.DisplayAttrs = p.DisplayAttrs // If the newer style Operations map isn't defined, create one from the legacy fields. operations := p.Operations @@ -294,7 +294,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st Pattern: t.pattern, Enum: field.AllowedValues, Default: field.Default, - DisplayAttrs: withoutOperationHints(field.DisplayAttrs), + DisplayAttrs: field.DisplayAttrs, }, Required: required, Deprecated: field.Deprecated, @@ -331,19 +331,9 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st op := NewOASOperation() - operationID := constructOperationID( - path, - pathIndex, - p.DisplayAttrs, - opType, - props.DisplayAttrs, - requestResponsePrefix, - ) - op.Summary = props.Summary op.Description = props.Description op.Deprecated = props.Deprecated - op.OperationID = operationID // Add any fields not present in the path as body parameters for POST. 
if opType == logical.CreateOperation || opType == logical.UpdateOperation { @@ -373,7 +363,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st Enum: field.AllowedValues, Default: field.Default, Deprecated: field.Deprecated, - DisplayAttrs: withoutOperationHints(field.DisplayAttrs), + DisplayAttrs: field.DisplayAttrs, } if openapiField.baseType == "array" { p.Items = &OASSchema{ @@ -395,7 +385,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st // Set the final request body. Only JSON request data is supported. if len(s.Properties) > 0 || s.Example != nil { - requestName := hyphenatedToTitleCase(operationID) + "Request" + requestName := constructRequestResponseName(path, requestResponsePrefix, "Request") doc.Components.Schemas[requestName] = s op.RequestBody = &OASRequestBody{ Required: true, @@ -491,7 +481,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st Enum: field.AllowedValues, Default: field.Default, Deprecated: field.Deprecated, - DisplayAttrs: withoutOperationHints(field.DisplayAttrs), + DisplayAttrs: field.DisplayAttrs, } if openapiField.baseType == "array" { p.Items = &OASSchema{ @@ -502,7 +492,7 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st } if len(resp.Fields) != 0 { - responseName := hyphenatedToTitleCase(operationID) + "Response" + responseName := constructRequestResponseName(path, requestResponsePrefix, "Response") doc.Components.Schemas[responseName] = responseSchema content = OASContent{ "application/json": &OASMediaTypeObject{ @@ -534,6 +524,33 @@ func documentPath(p *Path, specialPaths *logical.Paths, requestResponsePrefix st return nil } +// constructRequestResponseName joins the given path with prefix & suffix into +// a CamelCase request or response name. +// +// For example, path=/config/lease/{name}, prefix="secret", suffix="request" +// will result in "SecretConfigLeaseRequest" +func constructRequestResponseName(path, prefix, suffix string) string { + var b strings.Builder + + title := cases.Title(language.English) + + b.WriteString(title.String(prefix)) + + // split the path by / _ - separators + for _, token := range strings.FieldsFunc(path, func(r rune) bool { + return r == '/' || r == '_' || r == '-' + }) { + // exclude request fields + if !strings.ContainsAny(token, "{}") { + b.WriteString(title.String(token)) + } + } + + b.WriteString(suffix) + + return b.String() +} + // specialPathMatch checks whether the given path matches one of the special // paths, taking into account * and + wildcards (e.g. foo/+/bar/*) func specialPathMatch(path string, specialPaths []string) bool { @@ -586,117 +603,6 @@ func specialPathMatch(path string, specialPaths []string) bool { return false } -// constructOperationID joins the given inputs into a hyphen-separated -// lower-case operation id, which is also used as a prefix for request and -// response names. -// -// The OperationPrefix / -Verb / -Suffix found in display attributes will be -// used, if provided. Otherwise, the function falls back to using the path and -// the operation. 
-// -// Examples of generated operation identifiers: -// - kvv2-write -// - kvv2-read -// - google-cloud-login -// - google-cloud-write-role -func constructOperationID( - path string, - pathIndex int, - pathAttributes *DisplayAttributes, - operation logical.Operation, - operationAttributes *DisplayAttributes, - defaultPrefix string, -) string { - var ( - prefix string - verb string - suffix string - ) - - if operationAttributes != nil { - prefix = operationAttributes.OperationPrefix - verb = operationAttributes.OperationVerb - suffix = operationAttributes.OperationSuffix - } - - if pathAttributes != nil { - if prefix == "" { - prefix = pathAttributes.OperationPrefix - } - if verb == "" { - verb = pathAttributes.OperationVerb - } - if suffix == "" { - suffix = pathAttributes.OperationSuffix - } - } - - // A single suffix string can contain multiple pipe-delimited strings. To - // determine the actual suffix, we attempt to match it by the index of the - // paths returned from `expandPattern(...)`. For example: - // - // pki/ - // Pattern: "keys/generate/(internal|exported|kms)", - // DisplayAttrs: { - // ... - // OperationSuffix: "internal-key|exported-key|kms-key", - // }, - // - // will expand into three paths and corresponding suffixes: - // - // path 0: "keys/generate/internal" suffix: internal-key - // path 1: "keys/generate/exported" suffix: exported-key - // path 2: "keys/generate/kms" suffix: kms-key - // - pathIndexOutOfRange := false - - if suffixes := strings.Split(suffix, "|"); len(suffixes) > 1 || pathIndex > 0 { - // if the index is out of bounds, fall back to the old logic - if pathIndex >= len(suffixes) { - suffix = "" - pathIndexOutOfRange = true - } else { - suffix = suffixes[pathIndex] - } - } - - // a helper that hyphenates & lower-cases the slice except the empty elements - toLowerHyphenate := func(parts []string) string { - filtered := make([]string, 0, len(parts)) - for _, e := range parts { - if e != "" { - filtered = append(filtered, e) - } - } - return strings.ToLower(strings.Join(filtered, "-")) - } - - // fall back to using the path + operation to construct the operation id - var ( - needPrefix = prefix == "" && verb == "" - needVerb = verb == "" - needSuffix = suffix == "" && (verb == "" || pathIndexOutOfRange) - ) - - if needPrefix { - prefix = defaultPrefix - } - - if needVerb { - if operation == logical.UpdateOperation { - verb = "write" - } else { - verb = string(operation) - } - } - - if needSuffix { - suffix = toLowerHyphenate(nonWordRe.Split(path, -1)) - } - - return toLowerHyphenate([]string{prefix, verb, suffix}) -} - // expandPattern expands a regex pattern by generating permutations of any optional parameters // and changing named parameters into their {openapi} equivalents. func expandPattern(pattern string) ([]string, error) { @@ -988,40 +894,6 @@ func splitFields(allFields map[string]*FieldSchema, pattern string) (pathFields, return pathFields, bodyFields } -// withoutOperationHints returns a copy of the given DisplayAttributes without -// OperationPrefix / OperationVerb / OperationSuffix since we don't need these -// fields in the final output. 
-func withoutOperationHints(in *DisplayAttributes) *DisplayAttributes { - if in == nil { - return nil - } - - copy := *in - - copy.OperationPrefix = "" - copy.OperationVerb = "" - copy.OperationSuffix = "" - - // return nil if all fields are empty to avoid empty JSON objects - if copy == (DisplayAttributes{}) { - return nil - } - - return &copy -} - -func hyphenatedToTitleCase(in string) string { - var b strings.Builder - - title := cases.Title(language.English, cases.NoLower) - - for _, word := range strings.Split(in, "-") { - b.WriteString(title.String(word)) - } - - return b.String() -} - // cleanedResponse is identical to logical.Response but with nulls // removed from from JSON encoding type cleanedResponse struct { @@ -1056,9 +928,6 @@ func cleanResponse(resp *logical.Response) *cleanedResponse { // postSysToolsRandomUrlbytes_2 // // An optional user-provided suffix ("context") may also be appended. -// -// Deprecated: operationID's are now populated using `constructOperationID`. -// This function is here for backwards compatibility with older plugins. func (d *OASDocument) CreateOperationIDs(context string) { opIDCount := make(map[string]int) var paths []string @@ -1086,10 +955,6 @@ func (d *OASDocument) CreateOperationIDs(context string) { continue } - if oasOperation.OperationID != "" { - continue - } - // Discard "_mount_path" from any {thing_mount_path} parameters path = strings.Replace(path, "_mount_path", "", 1) diff --git a/sdk/framework/openapi_test.go b/sdk/framework/openapi_test.go index 9e2763f1241e03..50f9bba1702810 100644 --- a/sdk/framework/openapi_test.go +++ b/sdk/framework/openapi_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( @@ -564,6 +561,66 @@ func TestOpenAPI_Paths(t *testing.T) { }) } +func TestOpenAPI_OperationID(t *testing.T) { + path1 := &Path{ + Pattern: "foo/" + GenericNameRegex("id"), + Fields: map[string]*FieldSchema{ + "id": {Type: TypeString}, + }, + Operations: map[logical.Operation]OperationHandler{ + logical.ReadOperation: &PathOperation{}, + logical.UpdateOperation: &PathOperation{}, + logical.DeleteOperation: &PathOperation{}, + }, + } + + path2 := &Path{ + Pattern: "Foo/" + GenericNameRegex("id"), + Fields: map[string]*FieldSchema{ + "id": {Type: TypeString}, + }, + Operations: map[logical.Operation]OperationHandler{ + logical.ReadOperation: &PathOperation{}, + }, + } + + for _, context := range []string{"", "bar"} { + doc := NewOASDocument("version") + err := documentPath(path1, nil, "kv", logical.TypeLogical, doc) + if err != nil { + t.Fatal(err) + } + err = documentPath(path2, nil, "kv", logical.TypeLogical, doc) + if err != nil { + t.Fatal(err) + } + doc.CreateOperationIDs(context) + + tests := []struct { + path string + op string + opID string + }{ + {"/Foo/{id}", "get", "getFooId"}, + {"/foo/{id}", "get", "getFooId_2"}, + {"/foo/{id}", "post", "postFooId"}, + {"/foo/{id}", "delete", "deleteFooId"}, + } + + for _, test := range tests { + actual := getPathOp(doc.Paths[test.path], test.op).OperationID + expected := test.opID + if context != "" { + expected += "_" + context + } + + if actual != expected { + t.Fatalf("expected %v, got %v", expected, actual) + } + } + } +} + func TestOpenAPI_CustomDecoder(t *testing.T) { p := &Path{ Pattern: "foo", @@ -652,216 +709,6 @@ func TestOpenAPI_CleanResponse(t *testing.T) { } } -func TestOpenAPI_constructOperationID(t *testing.T) { - tests := map[string]struct { - path string - pathIndex int - pathAttributes *DisplayAttributes - 
operation logical.Operation - operationAttributes *DisplayAttributes - defaultPrefix string - expected string - }{ - "empty": { - path: "", - pathIndex: 0, - pathAttributes: nil, - operation: logical.Operation(""), - operationAttributes: nil, - defaultPrefix: "", - expected: "", - }, - "simple-read": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: nil, - operation: logical.ReadOperation, - operationAttributes: nil, - defaultPrefix: "test", - expected: "test-read-path-to-thing", - }, - "simple-write": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: nil, - operation: logical.UpdateOperation, - operationAttributes: nil, - defaultPrefix: "test", - expected: "test-write-path-to-thing", - }, - "operation-verb": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationVerb: "do-something"}, - operation: logical.UpdateOperation, - operationAttributes: nil, - defaultPrefix: "test", - expected: "do-something", - }, - "operation-verb-override": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationVerb: "do-something"}, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationVerb: "do-something-else"}, - defaultPrefix: "test", - expected: "do-something-else", - }, - "operation-prefix": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationPrefix: "my-prefix"}, - operation: logical.UpdateOperation, - operationAttributes: nil, - defaultPrefix: "test", - expected: "my-prefix-write-path-to-thing", - }, - "operation-prefix-override": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationPrefix: "my-prefix"}, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix"}, - defaultPrefix: "test", - expected: "better-prefix-write-path-to-thing", - }, - "operation-prefix-and-suffix": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationPrefix: "my-prefix", OperationSuffix: "my-suffix"}, - operation: logical.UpdateOperation, - operationAttributes: nil, - defaultPrefix: "test", - expected: "my-prefix-write-my-suffix", - }, - "operation-prefix-and-suffix-override": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationPrefix: "my-prefix", OperationSuffix: "my-suffix"}, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationSuffix: "better-suffix"}, - defaultPrefix: "test", - expected: "better-prefix-write-better-suffix", - }, - "operation-prefix-verb-suffix": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationPrefix: "my-prefix", OperationSuffix: "my-suffix", OperationVerb: "Create"}, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationSuffix: "better-suffix"}, - defaultPrefix: "test", - expected: "better-prefix-create-better-suffix", - }, - "operation-prefix-verb-suffix-override": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: &DisplayAttributes{OperationPrefix: "my-prefix", OperationSuffix: "my-suffix", OperationVerb: "Create"}, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationSuffix: "better-suffix", OperationVerb: "Login"}, - defaultPrefix: "test", - expected: "better-prefix-login-better-suffix", - }, - "operation-prefix-verb": { - 
path: "path/to/thing", - pathIndex: 0, - pathAttributes: nil, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationVerb: "Login"}, - defaultPrefix: "test", - expected: "better-prefix-login", - }, - "operation-verb-suffix": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: nil, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationVerb: "Login", OperationSuffix: "better-suffix"}, - defaultPrefix: "test", - expected: "login-better-suffix", - }, - "pipe-delimited-suffix-0": { - path: "path/to/thing", - pathIndex: 0, - pathAttributes: nil, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationSuffix: "suffix0|suffix1"}, - defaultPrefix: "test", - expected: "better-prefix-write-suffix0", - }, - "pipe-delimited-suffix-1": { - path: "path/to/thing", - pathIndex: 1, - pathAttributes: nil, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationSuffix: "suffix0|suffix1"}, - defaultPrefix: "test", - expected: "better-prefix-write-suffix1", - }, - "pipe-delimited-suffix-2-fallback": { - path: "path/to/thing", - pathIndex: 2, - pathAttributes: nil, - operation: logical.UpdateOperation, - operationAttributes: &DisplayAttributes{OperationPrefix: "better-prefix", OperationSuffix: "suffix0|suffix1"}, - defaultPrefix: "test", - expected: "better-prefix-write-path-to-thing", - }, - } - - for name, test := range tests { - name, test := name, test - t.Run(name, func(t *testing.T) { - t.Parallel() - actual := constructOperationID( - test.path, - test.pathIndex, - test.pathAttributes, - test.operation, - test.operationAttributes, - test.defaultPrefix, - ) - if actual != test.expected { - t.Fatalf("expected: %s; got: %s", test.expected, actual) - } - }) - } -} - -func TestOpenAPI_hyphenatedToTitleCase(t *testing.T) { - tests := map[string]struct { - in string - expected string - }{ - "simple": { - in: "test", - expected: "Test", - }, - "two-words": { - in: "two-words", - expected: "TwoWords", - }, - "three-words": { - in: "one-two-three", - expected: "OneTwoThree", - }, - "not-hyphenated": { - in: "something_like_this", - expected: "Something_like_this", - }, - } - - for name, test := range tests { - name, test := name, test - t.Run(name, func(t *testing.T) { - t.Parallel() - actual := hyphenatedToTitleCase(test.in) - if actual != test.expected { - t.Fatalf("expected: %s; got: %s", test.expected, actual) - } - }) - } -} - func testPath(t *testing.T, path *Path, sp *logical.Paths, expectedJSON string) { t.Helper() diff --git a/sdk/framework/path.go b/sdk/framework/path.go index e96ba4c41bb295..e6221af92d52ff 100644 --- a/sdk/framework/path.go +++ b/sdk/framework/path.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( @@ -207,11 +204,6 @@ type DisplayAttributes struct { // Name is the name of the field suitable as a label or documentation heading. Name string `json:"name,omitempty"` - // Description of the field that renders as tooltip help text beside the label (name) in the UI. - // This may be used to replace descriptions that reference comma separation but correspond - // to UI inputs where only arrays are valid. For example params with Type: framework.TypeCommaStringSlice - Description string `json:"description,omitempty"` - // Value is a sample value to display for this field. 
This may be used // to indicate a default value, but it is for display only and completely separate // from any Default member handling. @@ -232,28 +224,6 @@ type DisplayAttributes struct { // Action is the verb to use for the operation. Action string `json:"action,omitempty"` - // OperationPrefix is a hyphenated lower-case string used to construct - // OpenAPI OperationID (prefix + verb + suffix). OperationPrefix is - // typically a human-readable name of the plugin or a prefix shared by - // multiple related endpoints. - OperationPrefix string `json:"operationPrefix,omitempty"` - - // OperationVerb is a hyphenated lower-case string used to construct - // OpenAPI OperationID (prefix + verb + suffix). OperationVerb is typically - // an action to be performed (e.g. "generate", "sign", "login", etc.). If - // not specified, the verb defaults to `logical.Operation.String()` - // (e.g. "read", "list", "delete", "write" for Create/Update) - OperationVerb string `json:"operationVerb,omitempty"` - - // OperationSuffix is a hyphenated lower-case string used to construct - // OpenAPI OperationID (prefix + verb + suffix). It is typically the name - // of the resource on which the action is performed (e.g. "role", - // "credentials", etc.). A pipe (|) separator can be used to list different - // suffixes for various permutations of the `Path.Pattern` regular - // expression. If not specified, the suffix defaults to the `Path.Pattern` - // split by dashes. - OperationSuffix string `json:"operationSuffix,omitempty"` - // EditType is the optional type of form field needed for a property // This is only necessary for a "textarea" or "file" EditType string `json:"editType,omitempty"` @@ -288,7 +258,6 @@ type PathOperation struct { Deprecated bool ForwardPerformanceSecondary bool ForwardPerformanceStandby bool - DisplayAttrs *DisplayAttributes } func (p *PathOperation) Handler() OperationFunc { @@ -305,7 +274,6 @@ func (p *PathOperation) Properties() OperationProperties { Deprecated: p.Deprecated, ForwardPerformanceSecondary: p.ForwardPerformanceSecondary, ForwardPerformanceStandby: p.ForwardPerformanceStandby, - DisplayAttrs: p.DisplayAttrs, } } diff --git a/sdk/framework/path_map.go b/sdk/framework/path_map.go index 46cf4720e96e0b..0cba8ea2fb16c6 100644 --- a/sdk/framework/path_map.go +++ b/sdk/framework/path_map.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/path_map_test.go b/sdk/framework/path_map_test.go index 3fe6308cbcd249..11e1f37c9d54c4 100644 --- a/sdk/framework/path_map_test.go +++ b/sdk/framework/path_map_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/path_struct.go b/sdk/framework/path_struct.go index cba855065ea20c..2a2848e5850844 100644 --- a/sdk/framework/path_struct.go +++ b/sdk/framework/path_struct.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/path_struct_test.go b/sdk/framework/path_struct_test.go index 88662af5300da4..9e81cc2e301c3a 100644 --- a/sdk/framework/path_struct_test.go +++ b/sdk/framework/path_struct_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/path_test.go b/sdk/framework/path_test.go index 4541930ed591a2..ca359d1f577634 100644 --- a/sdk/framework/path_test.go +++ b/sdk/framework/path_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/policy_map.go b/sdk/framework/policy_map.go index 94accf88ae74cd..7befb399545cd8 100644 --- a/sdk/framework/policy_map.go +++ b/sdk/framework/policy_map.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/policy_map_test.go b/sdk/framework/policy_map_test.go index b785fddc783c3f..6a88b80511850f 100644 --- a/sdk/framework/policy_map_test.go +++ b/sdk/framework/policy_map_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/secret.go b/sdk/framework/secret.go index 095bc12b7246a0..0c8f0dfcccdf6f 100644 --- a/sdk/framework/secret.go +++ b/sdk/framework/secret.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/secret_test.go b/sdk/framework/secret_test.go index 29058dc84e3afc..83af4753b6d989 100644 --- a/sdk/framework/secret_test.go +++ b/sdk/framework/secret_test.go @@ -1,4 +1 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework diff --git a/sdk/framework/template.go b/sdk/framework/template.go index d395c8f8dbd5fe..3abdd624c55e64 100644 --- a/sdk/framework/template.go +++ b/sdk/framework/template.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/testdata/legacy.json b/sdk/framework/testdata/legacy.json index 548151c6f9e60d..f526f1e2aadec7 100644 --- a/sdk/framework/testdata/legacy.json +++ b/sdk/framework/testdata/legacy.json @@ -24,11 +24,9 @@ } ], "get": { - "operationId": "kv-read-lookup-id", + "operationId": "getLookupId", "summary": "Synopsis", - "tags": [ - "secrets" - ], + "tags": ["secrets"], "responses": { "200": { "description": "OK" @@ -36,17 +34,15 @@ } }, "post": { - "operationId": "kv-write-lookup-id", + "operationId": "postLookupId", "summary": "Synopsis", - "tags": [ - "secrets" - ], + "tags": ["secrets"], "requestBody": { "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KvWriteLookupIdRequest" + "$ref": "#/components/schemas/KvLookupRequest" } } } @@ -61,7 +57,7 @@ }, "components": { "schemas": { - "KvWriteLookupIdRequest": { + "KvLookupRequest": { "type": "object", "properties": { "token": { @@ -73,3 +69,4 @@ } } } + diff --git a/sdk/framework/testdata/operations.json b/sdk/framework/testdata/operations.json index 7fca0e2650142c..e1db6744018d31 100644 --- a/sdk/framework/testdata/operations.json +++ b/sdk/framework/testdata/operations.json @@ -37,10 +37,8 @@ } ], "get": { - "operationId": "kv-read-foo-id", - "tags": [ - "secrets" - ], + "operationId": "getFooId", + "tags": ["secrets"], "summary": "My Summary", "description": "My Description", "responses": { @@ -60,10 +58,8 @@ ] }, "post": { - "operationId": "kv-write-foo-id", - "tags": [ - "secrets" - ], + "operationId": "postFooId", + "tags": ["secrets"], "summary": "Update Summary", "description": "Update Description", "requestBody": { @@ -71,7 +67,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KvWriteFooIdRequest" + "$ref": "#/components/schemas/KvFooRequest" } } } @@ -86,11 +82,9 @@ }, "components": { "schemas": { - "KvWriteFooIdRequest": { + "KvFooRequest": { "type": "object", - "required": [ - "age" - ], + "required": ["age"], "properties": { "flavors": { "type": "array", @@ -102,11 +96,7 @@ "age": { "type": "integer", "description": "the age", - "enum": [ - 1, - 2, - 3 - ], + "enum": [1, 2, 3], "x-vault-displayAttrs": { "name": "Age", "sensitive": true, @@ -123,13 +113,9 @@ "x-abc-token": { "type": "string", "description": "a header value", - "enum": [ - "a", - "b", - "c" - ] + "enum": ["a", "b", "c"] }, - "maximum": { + "maximum" : { "type": "integer", "description": "a maximum value", "format": "int64" diff --git a/sdk/framework/testdata/operations_list.json b/sdk/framework/testdata/operations_list.json index a08208b24fa44d..e89622a3c40a16 100644 --- a/sdk/framework/testdata/operations_list.json +++ b/sdk/framework/testdata/operations_list.json @@ -36,10 +36,8 @@ } ], "get": { - "operationId": "kv-list-foo-id", - "tags": [ - "secrets" - ], + "operationId": "getFooId", + "tags": ["secrets"], "summary": "List Summary", "description": "List Description", "responses": { @@ -55,9 +53,7 @@ "in": "query", "schema": { "type": "string", - "enum": [ - "true" - ] + "enum": ["true"] } } ] @@ -65,6 +61,7 @@ } }, "components": { - "schemas": {} + "schemas": { + } } } diff --git a/sdk/framework/testdata/responses.json b/sdk/framework/testdata/responses.json index 98d501ec5e896c..4e442cfb49ba4b 100644 --- a/sdk/framework/testdata/responses.json +++ b/sdk/framework/testdata/responses.json @@ -14,10 +14,8 @@ "description": "Synopsis", "x-vault-unauthenticated": true, "delete": { - "operationId": 
"kv-delete-foo", - "tags": [ - "secrets" - ], + "operationId": "deleteFoo", + "tags": ["secrets"], "summary": "Delete stuff", "responses": { "204": { @@ -26,10 +24,8 @@ } }, "get": { - "operationId": "kv-read-foo", - "tags": [ - "secrets" - ], + "operationId": "getFoo", + "tags": ["secrets"], "summary": "My Summary", "description": "My Description", "responses": { @@ -38,7 +34,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/KvReadFooResponse" + "$ref": "#/components/schemas/KvFooResponse" } } } @@ -49,7 +45,7 @@ }, "components": { "schemas": { - "KvReadFooResponse": { + "KvFooResponse": { "type": "object", "properties": { "field_a": { @@ -65,3 +61,4 @@ } } } + diff --git a/sdk/framework/testing.go b/sdk/framework/testing.go index d2035d676f0c80..a00a3241cf82fb 100644 --- a/sdk/framework/testing.go +++ b/sdk/framework/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/wal.go b/sdk/framework/wal.go index b090f03e7bf330..7e7bb1afa959df 100644 --- a/sdk/framework/wal.go +++ b/sdk/framework/wal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/framework/wal_test.go b/sdk/framework/wal_test.go index 040749239c8874..958be7e79ecd91 100644 --- a/sdk/framework/wal_test.go +++ b/sdk/framework/wal_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package framework import ( diff --git a/sdk/go.mod b/sdk/go.mod index c4a2ec99fdbcb6..a51b3f919be3c4 100644 --- a/sdk/go.mod +++ b/sdk/go.mod @@ -3,94 +3,65 @@ module github.com/hashicorp/vault/sdk go 1.19 require ( - github.com/armon/go-metrics v0.4.1 + github.com/armon/go-metrics v0.3.9 github.com/armon/go-radix v1.0.0 - github.com/cenkalti/backoff/v3 v3.2.2 - github.com/docker/docker v23.0.4+incompatible - github.com/docker/go-connections v0.4.0 - github.com/evanphx/json-patch/v5 v5.6.0 + github.com/evanphx/json-patch/v5 v5.5.0 github.com/fatih/structs v1.1.0 - github.com/go-ldap/ldap/v3 v3.4.1 - github.com/go-test/deep v1.1.0 + github.com/go-ldap/ldap/v3 v3.1.10 + github.com/go-test/deep v1.0.2 github.com/golang/protobuf v1.5.2 github.com/golang/snappy v0.0.4 - github.com/google/tink/go v1.7.0 github.com/hashicorp/errwrap v1.1.0 - github.com/hashicorp/go-cleanhttp v0.5.2 - github.com/hashicorp/go-hclog v1.4.0 + github.com/hashicorp/go-hclog v0.16.2 github.com/hashicorp/go-immutable-radix v1.3.1 github.com/hashicorp/go-kms-wrapping/entropy/v2 v2.0.0 github.com/hashicorp/go-kms-wrapping/v2 v2.0.8 github.com/hashicorp/go-multierror v1.1.1 - github.com/hashicorp/go-plugin v1.4.8 - github.com/hashicorp/go-retryablehttp v0.7.1 - github.com/hashicorp/go-secure-stdlib/base62 v0.1.2 - github.com/hashicorp/go-secure-stdlib/mlock v0.1.2 - github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 + github.com/hashicorp/go-plugin v1.4.5 + github.com/hashicorp/go-retryablehttp v0.5.3 + github.com/hashicorp/go-secure-stdlib/base62 v0.1.1 + github.com/hashicorp/go-secure-stdlib/mlock v0.1.1 + github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 github.com/hashicorp/go-secure-stdlib/password v0.1.1 github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.2 github.com/hashicorp/go-sockaddr v1.0.2 - github.com/hashicorp/go-uuid v1.0.3 - github.com/hashicorp/go-version v1.6.0 + github.com/hashicorp/go-uuid v1.0.2 + github.com/hashicorp/go-version v1.2.0 
github.com/hashicorp/golang-lru v0.5.4 - github.com/hashicorp/hcl v1.0.1-vault-5 - github.com/hashicorp/vault/api v1.9.1 - github.com/mitchellh/copystructure v1.2.0 - github.com/mitchellh/go-testing-interface v1.14.1 + github.com/hashicorp/hcl v1.0.0 + github.com/mitchellh/copystructure v1.0.0 + github.com/mitchellh/go-testing-interface v1.0.0 github.com/mitchellh/mapstructure v1.5.0 - github.com/pierrec/lz4 v2.6.1+incompatible + github.com/pierrec/lz4 v2.5.2+incompatible github.com/ryanuber/go-glob v1.0.0 - github.com/stretchr/testify v1.8.2 + github.com/stretchr/testify v1.7.0 go.uber.org/atomic v1.9.0 golang.org/x/crypto v0.6.0 - golang.org/x/net v0.8.0 - golang.org/x/text v0.8.0 - google.golang.org/grpc v1.53.0 - google.golang.org/protobuf v1.28.1 + golang.org/x/text v0.7.0 + google.golang.org/grpc v1.41.0 + google.golang.org/protobuf v1.27.1 ) require ( - github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/containerd/containerd v1.7.0 // indirect - github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/docker/distribution v2.8.1+incompatible // indirect - github.com/docker/go-units v0.5.0 // indirect - github.com/fatih/color v1.13.0 // indirect - github.com/frankban/quicktest v1.11.3 // indirect - github.com/go-asn1-ber/asn1-ber v1.5.1 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/hashicorp/go-rootcerts v1.0.2 // indirect - github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 // indirect - github.com/klauspost/compress v1.16.5 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/fatih/color v1.7.0 // indirect + github.com/frankban/quicktest v1.10.0 // indirect + github.com/go-asn1-ber/asn1-ber v1.3.1 // indirect + github.com/hashicorp/go-cleanhttp v0.5.0 // indirect + github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect github.com/kr/text v0.2.0 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.17 // indirect - github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/moby/patternmatcher v0.5.0 // indirect - github.com/moby/sys/sequential v0.5.0 // indirect - github.com/moby/term v0.0.0-20221205130635-1aeaba878587 // indirect - github.com/morikuni/aec v1.0.0 // indirect - github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect - github.com/oklog/run v1.1.0 // indirect - github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b // indirect - github.com/opencontainers/runc v1.1.6 // indirect + github.com/mattn/go-colorable v0.1.6 // indirect + github.com/mattn/go-isatty v0.0.12 // indirect + github.com/mitchellh/reflectwalk v1.0.0 // indirect + github.com/oklog/run v1.0.0 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/rogpeppe/go-internal v1.8.1 // indirect - github.com/sirupsen/logrus v1.9.0 // indirect - github.com/stretchr/objx v0.5.0 // indirect - golang.org/x/mod v0.8.0 // indirect - golang.org/x/sys v0.6.0 // indirect - golang.org/x/term v0.6.0 // indirect - golang.org/x/time v0.0.0-20220411224347-583f2d630306 // indirect - golang.org/x/tools v0.6.0 // indirect - google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 // indirect - gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f 
// indirect - gopkg.in/square/go-jose.v2 v2.6.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect - gotest.tools/v3 v3.4.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/rogpeppe/go-internal v1.9.0 // indirect + github.com/stretchr/objx v0.1.1 // indirect + golang.org/x/net v0.7.0 // indirect + golang.org/x/sys v0.5.0 // indirect + golang.org/x/term v0.5.0 // indirect + google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 // indirect + gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect ) diff --git a/sdk/go.sum b/sdk/go.sum index a6b0f59b096cef..b33acfc9cfe341 100644 --- a/sdk/go.sum +++ b/sdk/go.sum @@ -1,16 +1,14 @@ -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= -github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c h1:/IBSNwUN8+eKzUzbJPqhK839ygXJ82sde8x3ogr6R28= -github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/Microsoft/hcsshim v0.10.0-rc.7 h1:HBytQPxcv8Oy4244zbQbe6hnOnx544eL5QPUqhJldz8= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= -github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/go-metrics v0.3.9 h1:O2sNqxBdvq8Eq5xmzljcYzAORli6RWCvEym4cJf9m18= +github.com/armon/go-metrics v0.3.9/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= @@ -18,76 +16,81 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/cenkalti/backoff/v3 v3.2.2 h1:cfUAAO3yvKMYKPrvhDuHSwQnhZNk/RMHKdZqKTxfm6M= -github.com/cenkalti/backoff/v3 v3.2.2/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 
v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= -github.com/containerd/containerd v1.7.0 h1:G/ZQr3gMZs6ZT0qPUZ15znx5QSdQdASW11nXTLTM2Pg= -github.com/containerd/containerd v1.7.0/go.mod h1:QfR7Efgb/6X2BDpTPJRvPTYDE9rsF0FsXX9J8sIs/sc= -github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68= -github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v23.0.4+incompatible h1:Kd3Bh9V/rO+XpTP/BLqM+gx8z7+Yb0AA2Ibj+nNo4ek= -github.com/docker/docker v23.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= -github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= -github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/evanphx/json-patch/v5 v5.5.0 h1:bAmFiUJ+o0o2B4OiTFeE3MqCOtyo+jjPP9iZ0VRxYUc= +github.com/evanphx/json-patch/v5 v5.5.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod 
h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY= -github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= -github.com/go-asn1-ber/asn1-ber v1.5.1 h1:pDbRAunXzIUXfx4CB2QJFv5IuPiuoW+sWvr/Us009o8= -github.com/go-asn1-ber/asn1-ber v1.5.1/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= +github.com/frankban/quicktest v1.10.0 h1:Gfh+GAJZOAoKZsIZeZbdn2JF10kN1XHNvjsvQK8gVkE= +github.com/frankban/quicktest v1.10.0/go.mod h1:ui7WezCLWMWxVWr1GETZY3smRy0G4KWq9vcPtJmFl7Y= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-asn1-ber/asn1-ber v1.3.1 h1:gvPdv/Hr++TRFCl0UbPFHC54P9N9jgsRPnmnr419Uck= +github.com/go-asn1-ber/asn1-ber v1.3.1/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-ldap/ldap/v3 v3.4.1 h1:fU/0xli6HY02ocbMuozHAYsaHLcnkLjvho2r5a34BUU= -github.com/go-ldap/ldap/v3 v3.4.1/go.mod h1:iYS1MdmrmceOJ1QOTnRXrIs7i3kloqtmGQjRvjKpyMg= +github.com/go-ldap/ldap/v3 v3.1.10 h1:7WsKqasmPThNvdl0Q5GPpbTDD/ZD98CfuawrMIuh7qQ= +github.com/go-ldap/ldap/v3 v3.1.10/go.mod h1:5Zun81jBTabRaI8lzN7E1JjyEl1g6zI6u9pd8luAK4Q= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= -github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/go-test/deep v1.0.2 h1:onZX1rnHT3Wv6cqNgYyFOOlgVKJrksuCMCRvJStbMYw= +github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod 
h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/tink/go v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w= -github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0 h1:wvCrVc9TjDls6+YGAF2hAifE1E5U1+b4tH6KdvN3Gig= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= -github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v1.4.0 h1:ctuWFGrhFha8BnnzxqeRGidlEcQkDyL5u8J8t5eA11I= -github.com/hashicorp/go-hclog v1.4.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-hclog v0.16.2 h1:K4ev2ib4LdQETX5cSZBG0DVLk1jwGqSPXBjdah3veNs= +github.com/hashicorp/go-hclog v0.16.2/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= @@ -98,20 
+101,17 @@ github.com/hashicorp/go-kms-wrapping/v2 v2.0.8/go.mod h1:qTCjxGig/kjuj3hk1z8pOUr github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.4.8 h1:CHGwpxYDOttQOY7HOWgETU9dyVjOXzniXDqJcYJE1zM= -github.com/hashicorp/go-plugin v1.4.8/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= +github.com/hashicorp/go-plugin v1.4.5 h1:oTE/oQR4eghggRg8VY7PAz3dr++VwDNBGCcOfIvHpBo= +github.com/hashicorp/go-plugin v1.4.5/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= +github.com/hashicorp/go-retryablehttp v0.5.3 h1:QlWt0KvWT0lq8MFppF9tsJGF+ynG7ztc2KIPhzRGk7s= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-retryablehttp v0.7.1 h1:sUiuQAnLlbvmExtFQs72iFW/HXeUn8Z1aJLQ4LJJbTQ= -github.com/hashicorp/go-retryablehttp v0.7.1/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-secure-stdlib/base62 v0.1.2 h1:ET4pqyjiGmY09R5y+rSd70J2w45CtbWDNvGqWp/R3Ng= -github.com/hashicorp/go-secure-stdlib/base62 v0.1.2/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= -github.com/hashicorp/go-secure-stdlib/mlock v0.1.2 h1:p4AKXPPS24tO8Wc8i1gLvSKdmkiSY5xuju57czJ/IJQ= -github.com/hashicorp/go-secure-stdlib/mlock v0.1.2/go.mod h1:zq93CJChV6L9QTfGKtfBxKqD7BqqXx5O04A/ns2p5+I= +github.com/hashicorp/go-secure-stdlib/base62 v0.1.1 h1:6KMBnfEv0/kLAz0O76sliN5mXbCDcLfs2kP7ssP7+DQ= +github.com/hashicorp/go-secure-stdlib/base62 v0.1.1/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= +github.com/hashicorp/go-secure-stdlib/mlock v0.1.1 h1:cCRo8gK7oq6A2L6LICkUZ+/a5rLiRXFMf1Qd4xSwxTc= +github.com/hashicorp/go-secure-stdlib/mlock v0.1.1/go.mod h1:zq93CJChV6L9QTfGKtfBxKqD7BqqXx5O04A/ns2p5+I= github.com/hashicorp/go-secure-stdlib/parseutil v0.1.1/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= -github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 h1:UpiO20jno/eV1eVZcxqWnUohyKRe1g8FPV/xH1s/2qs= -github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= github.com/hashicorp/go-secure-stdlib/password v0.1.1 h1:6JzmBqXprakgFEHwBgdchsjaA9x3GyjdI568bXKxa60= github.com/hashicorp/go-secure-stdlib/password v0.1.1/go.mod h1:9hH302QllNwu1o2TGYtSk8I8kTAN0ca1EHpwhm5Mmzo= github.com/hashicorp/go-secure-stdlib/strutil v0.1.1/go.mod h1:gKOamz3EwoIoJq7mlMIRBpVTAUn8qPCrEclOKKWhD3U= @@ -122,228 +122,203 @@ github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.2/go.mod h1:l8slYwnJA26yBz+Er github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= github.com/hashicorp/go-uuid v1.0.2/go.mod 
h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= -github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM= -github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM= -github.com/hashicorp/vault/api v1.9.1 h1:LtY/I16+5jVGU8rufyyAkwopgq/HpUnxFBg+QLOAV38= -github.com/hashicorp/vault/api v1.9.1/go.mod h1:78kktNcQYbBGSrOjQfHjXN32OhhxXnbYl3zxpd2uPUs= -github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 h1:xixZ2bWeofWV68J+x6AzmKuVM/JWCQwkWm6GW/MUR6I= -github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= +github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= -github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= -github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= -github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= -github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= +github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= -github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/moby/patternmatcher v0.5.0 h1:YCZgJOeULcxLw1Q+sVR636pmS7sPEn1Qo2iAN6M7DBo= -github.com/moby/patternmatcher v0.5.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= -github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= 
-github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= -github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA= -github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= -github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= -github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= -github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b h1:YWuSjZCQAPM8UUBLkYUk1e+rZcvWHJmFb6i6rM44Xs8= -github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ= -github.com/opencontainers/runc v1.1.6 h1:XbhB8IfG/EsnhNvZtNdLB0GBw92GYEFvKlhaJk9jUgA= -github.com/opencontainers/runc v1.1.6/go.mod h1:CbUumNnWCuTGFukNXahoo/RFBZvDAgRh/smNYNOhA50= +github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM= -github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pierrec/lz4 v2.5.2+incompatible h1:WCjObylUIOlKy/+7Abdn34TLIkXiA4UWUMhxq9m9ZXI= +github.com/pierrec/lz4 v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= -github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg= -github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= -github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod 
h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200604202706-70a84ac30bf9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net 
v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw= -golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68= -golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w= -golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= -golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 h1:DdoeryqhaXp1LtT/emMP1BRJPHHKFi5akj/nbx/zNTA= -google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= -google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc= -google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.41.0 h1:f+PlOh7QV4iIJkPrx5NQ7qaNGFQ3OTse67yaDHfju4E= +google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= +google.golang.org/protobuf v1.27.1/go.mod 
h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI= -gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.4.0 h1:ZazjZUfuVeZGLAmlKKuyv3IKP5orXcwtOwDQH6YVr6o= -gotest.tools/v3 v3.4.0/go.mod h1:CtbdzLSsqVhDgMtKsx03ird5YTGB3ar27v0u/yKBW5g= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/sdk/helper/authmetadata/auth_metadata.go b/sdk/helper/authmetadata/auth_metadata.go index e490ab359abaad..0fd2bd50f83092 100644 --- a/sdk/helper/authmetadata/auth_metadata.go +++ b/sdk/helper/authmetadata/auth_metadata.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package authmetadata /* diff --git a/sdk/helper/authmetadata/auth_metadata_acc_test.go b/sdk/helper/authmetadata/auth_metadata_acc_test.go index 189c960098d319..39888c69a16cb8 100644 --- a/sdk/helper/authmetadata/auth_metadata_acc_test.go +++ b/sdk/helper/authmetadata/auth_metadata_acc_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package authmetadata import ( diff --git a/sdk/helper/authmetadata/auth_metadata_test.go b/sdk/helper/authmetadata/auth_metadata_test.go index a82044f9bc4364..62341ebc85fbb2 100644 --- a/sdk/helper/authmetadata/auth_metadata_test.go +++ b/sdk/helper/authmetadata/auth_metadata_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package authmetadata import ( diff --git a/sdk/helper/base62/base62.go b/sdk/helper/base62/base62.go index 7d2c7d5ba15895..981face425d45a 100644 --- a/sdk/helper/base62/base62.go +++ b/sdk/helper/base62/base62.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - // DEPRECATED: this has been moved to go-secure-stdlib and will be removed package base62 diff --git a/sdk/helper/certutil/certutil_test.go b/sdk/helper/certutil/certutil_test.go index 8b550946121d4c..9c10f38a818a6b 100644 --- a/sdk/helper/certutil/certutil_test.go +++ b/sdk/helper/certutil/certutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package certutil import ( @@ -13,7 +10,6 @@ import ( "crypto/rsa" "crypto/x509" "crypto/x509/pkix" - "encoding/asn1" "encoding/json" "encoding/pem" "fmt" @@ -946,66 +942,6 @@ func TestSignatureAlgorithmRoundTripping(t *testing.T) { } } -// TestParseBasicConstraintExtension Verify extension generation and parsing of x509 basic constraint extensions -// works as expected. -func TestBasicConstraintExtension(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - isCA bool - maxPathLen int - }{ - {"empty-seq", false, -1}, - {"just-ca-true", true, -1}, - {"just-ca-with-maxpathlen", true, 2}, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - ext, err := CreateBasicConstraintExtension(tt.isCA, tt.maxPathLen) - if err != nil { - t.Fatalf("failed generating basic extension: %v", err) - } - - gotIsCa, gotMaxPathLen, err := ParseBasicConstraintExtension(ext) - if err != nil { - t.Fatalf("failed parsing basic extension: %v", err) - } - - if tt.isCA != gotIsCa { - t.Fatalf("expected isCa (%v) got isCa (%v)", tt.isCA, gotIsCa) - } - - if tt.maxPathLen != gotMaxPathLen { - t.Fatalf("expected maxPathLen (%v) got maxPathLen (%v)", tt.maxPathLen, gotMaxPathLen) - } - }) - } - - t.Run("bad-extension-oid", func(t *testing.T) { - // Test invalid type errors out - _, _, err := ParseBasicConstraintExtension(pkix.Extension{}) - if err == nil { - t.Fatalf("should have failed parsing non-basic constraint extension") - } - }) - - t.Run("garbage-value", func(t *testing.T) { - extraBytes, err := asn1.Marshal("a string") - if err != nil { - t.Fatalf("failed encoding the struct: %v", err) - } - ext := pkix.Extension{ - Id: ExtensionBasicConstraintsOID, - Value: extraBytes, - } - _, _, err = ParseBasicConstraintExtension(ext) - if err == nil { - t.Fatalf("should have failed parsing basic constraint with extra information") - } - }) -} - func genRsaKey(t *testing.T) *rsa.PrivateKey { key, err := rsa.GenerateKey(rand.Reader, 2048) if err != nil { diff --git a/sdk/helper/certutil/helpers.go b/sdk/helper/certutil/helpers.go index 28472027f8cd0c..547508f6838b78 100644 --- a/sdk/helper/certutil/helpers.go +++ b/sdk/helper/certutil/helpers.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package certutil import ( @@ -1042,8 +1039,8 @@ func selectSignatureAlgorithmForECDSA(pub crypto.PublicKey, signatureBits int) x } var ( - ExtensionBasicConstraintsOID = []int{2, 5, 29, 19} - ExtensionSubjectAltNameOID = []int{2, 5, 29, 17} + oidExtensionBasicConstraints = []int{2, 5, 29, 19} + oidExtensionSubjectAltName = []int{2, 5, 29, 17} ) // CreateCSR creates a CSR with the default rand.Reader to @@ -1098,7 +1095,7 @@ func createCSR(data *CreationBundle, addBasicConstraints bool, randReader io.Rea return nil, errutil.InternalError{Err: errwrap.Wrapf("error marshaling basic constraints: {{err}}", err).Error()} } ext := pkix.Extension{ - Id: ExtensionBasicConstraintsOID, + Id: oidExtensionBasicConstraints, Value: val, Critical: true, } @@ -1219,7 +1216,7 @@ func signCertificate(data *CreationBundle, randReader io.Reader) (*ParsedCertBun certTemplate.URIs = data.CSR.URIs for _, name := range data.CSR.Extensions { - if !name.Id.Equal(ExtensionBasicConstraintsOID) && !(len(data.Params.OtherSANs) > 0 && name.Id.Equal(ExtensionSubjectAltNameOID)) { + if !name.Id.Equal(oidExtensionBasicConstraints) && !(len(data.Params.OtherSANs) > 0 && name.Id.Equal(oidExtensionSubjectAltName)) { certTemplate.ExtraExtensions = append(certTemplate.ExtraExtensions, name) } } @@ -1392,68 +1389,3 @@ func CreateDeltaCRLIndicatorExt(completeCRLNumber int64) (pkix.Extension, error) Value: bigNumValue, }, nil } - -// ParseBasicConstraintExtension parses a basic constraint pkix.Extension, useful if attempting to validate -// CSRs are requesting CA privileges as Go does not expose its implementation. Values returned are -// IsCA, MaxPathLen or error. If MaxPathLen was not set, a value of -1 will be returned. -func ParseBasicConstraintExtension(ext pkix.Extension) (bool, int, error) { - if !ext.Id.Equal(ExtensionBasicConstraintsOID) { - return false, -1, fmt.Errorf("passed in extension was not a basic constraint extension") - } - - // All elements are set to optional here, as it is possible that we receive a CSR with the extension - // containing an empty sequence by spec. - type basicConstraints struct { - IsCA bool `asn1:"optional"` - MaxPathLen int `asn1:"optional,default:-1"` - } - bc := &basicConstraints{} - leftOver, err := asn1.Unmarshal(ext.Value, bc) - if err != nil { - return false, -1, fmt.Errorf("failed unmarshalling extension value: %w", err) - } - - numLeftOver := len(bytes.TrimSpace(leftOver)) - if numLeftOver > 0 { - return false, -1, fmt.Errorf("%d extra bytes within basic constraints value extension", numLeftOver) - } - - return bc.IsCA, bc.MaxPathLen, nil -} - -// CreateBasicConstraintExtension create a basic constraint extension based on inputs, -// if isCa is false, an empty value sequence will be returned with maxPath being -// ignored. If isCa is true maxPath can be set to -1 to not set a maxPath value. 
-func CreateBasicConstraintExtension(isCa bool, maxPath int) (pkix.Extension, error) { - var asn1Bytes []byte - var err error - - switch { - case isCa && maxPath >= 0: - CaAndMaxPathLen := struct { - IsCa bool `asn1:""` - MaxPathLen int `asn1:""` - }{ - IsCa: isCa, - MaxPathLen: maxPath, - } - asn1Bytes, err = asn1.Marshal(CaAndMaxPathLen) - case isCa && maxPath < 0: - justCa := struct { - IsCa bool `asn1:""` - }{IsCa: isCa} - asn1Bytes, err = asn1.Marshal(justCa) - default: - asn1Bytes, err = asn1.Marshal(struct{}{}) - } - - if err != nil { - return pkix.Extension{}, err - } - - return pkix.Extension{ - Id: ExtensionBasicConstraintsOID, - Critical: true, - Value: asn1Bytes, - }, nil -} diff --git a/sdk/helper/certutil/types.go b/sdk/helper/certutil/types.go index 039ff8a52291a2..8a806c6f8654cd 100644 --- a/sdk/helper/certutil/types.go +++ b/sdk/helper/certutil/types.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Package certutil contains helper functions that are mostly used // with the PKI backend but can be generally useful. Functionality // includes helpers for converting a certificate/private key bundle diff --git a/sdk/helper/cidrutil/cidr.go b/sdk/helper/cidrutil/cidr.go index 9d2a41829c4c3d..7e48c2be5034bd 100644 --- a/sdk/helper/cidrutil/cidr.go +++ b/sdk/helper/cidrutil/cidr.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cidrutil import ( diff --git a/sdk/helper/cidrutil/cidr_test.go b/sdk/helper/cidrutil/cidr_test.go index e6fc5764452f2c..6a8662cdf17ff4 100644 --- a/sdk/helper/cidrutil/cidr_test.go +++ b/sdk/helper/cidrutil/cidr_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cidrutil import ( diff --git a/sdk/helper/compressutil/compress.go b/sdk/helper/compressutil/compress.go index 9e96d8dd32ec8c..924f82a2a1baac 100644 --- a/sdk/helper/compressutil/compress.go +++ b/sdk/helper/compressutil/compress.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package compressutil import ( diff --git a/sdk/helper/compressutil/compress_test.go b/sdk/helper/compressutil/compress_test.go index 7d90ce87e982f5..f85f3c935ba1ba 100644 --- a/sdk/helper/compressutil/compress_test.go +++ b/sdk/helper/compressutil/compress_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package compressutil import ( diff --git a/sdk/helper/consts/agent.go b/sdk/helper/consts/agent.go index 53b8b8e2e76e6a..92207e3d818ba3 100644 --- a/sdk/helper/consts/agent.go +++ b/sdk/helper/consts/agent.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consts // AgentPathCacheClear is the path that the agent will use as its cache-clear diff --git a/sdk/helper/consts/consts.go b/sdk/helper/consts/consts.go index 744d2aa81c720f..b820600786b8e6 100644 --- a/sdk/helper/consts/consts.go +++ b/sdk/helper/consts/consts.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consts const ( diff --git a/sdk/helper/consts/deprecation_status.go b/sdk/helper/consts/deprecation_status.go index e72292bee6c3e6..656d6cc992a759 100644 --- a/sdk/helper/consts/deprecation_status.go +++ b/sdk/helper/consts/deprecation_status.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package consts // EnvVaultAllowPendingRemovalMounts allows Pending Removal builtins to be diff --git a/sdk/helper/consts/error.go b/sdk/helper/consts/error.go index 5bd3f5e6e26127..1a9175c6392dfc 100644 --- a/sdk/helper/consts/error.go +++ b/sdk/helper/consts/error.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consts import "errors" diff --git a/sdk/helper/consts/plugin_types.go b/sdk/helper/consts/plugin_types.go index 6bc14b54f7167f..f2815d4fede03b 100644 --- a/sdk/helper/consts/plugin_types.go +++ b/sdk/helper/consts/plugin_types.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consts // NOTE: this file has been copied to diff --git a/sdk/helper/consts/proxy.go b/sdk/helper/consts/proxy.go deleted file mode 100644 index 0fc4117ccc1d7f..00000000000000 --- a/sdk/helper/consts/proxy.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package consts - -// ProxyPathCacheClear is the path that the proxy will use as its cache-clear -// endpoint. -const ProxyPathCacheClear = "/proxy/v1/cache-clear" - -// ProxyPathMetrics is the path the proxy will use to expose its internal -// metrics. -const ProxyPathMetrics = "/proxy/v1/metrics" - -// ProxyPathQuit is the path that the proxy will use to trigger stopping it. -const ProxyPathQuit = "/proxy/v1/quit" diff --git a/sdk/helper/consts/replication.go b/sdk/helper/consts/replication.go index 2a1511a9a93388..f72c2f47aee2e1 100644 --- a/sdk/helper/consts/replication.go +++ b/sdk/helper/consts/replication.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consts const ( diff --git a/sdk/helper/consts/token_consts.go b/sdk/helper/consts/token_consts.go index 108e7ba42d78ae..2b4e0278bf2875 100644 --- a/sdk/helper/consts/token_consts.go +++ b/sdk/helper/consts/token_consts.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consts const ( diff --git a/sdk/helper/cryptoutil/cryptoutil.go b/sdk/helper/cryptoutil/cryptoutil.go index 956dad3408783f..a37086c645d80b 100644 --- a/sdk/helper/cryptoutil/cryptoutil.go +++ b/sdk/helper/cryptoutil/cryptoutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cryptoutil import "golang.org/x/crypto/blake2b" diff --git a/sdk/helper/cryptoutil/cryptoutil_test.go b/sdk/helper/cryptoutil/cryptoutil_test.go index 35799e42a2eae7..a277e4fcee4008 100644 --- a/sdk/helper/cryptoutil/cryptoutil_test.go +++ b/sdk/helper/cryptoutil/cryptoutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package cryptoutil import "testing" diff --git a/sdk/helper/custommetadata/custom_metadata.go b/sdk/helper/custommetadata/custom_metadata.go index 81d4c27035d227..7d4ff8763d1167 100644 --- a/sdk/helper/custommetadata/custom_metadata.go +++ b/sdk/helper/custommetadata/custom_metadata.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package custommetadata import ( diff --git a/sdk/helper/custommetadata/custom_metadata_test.go b/sdk/helper/custommetadata/custom_metadata_test.go index 2b25d991203c05..e71bd59462fedf 100644 --- a/sdk/helper/custommetadata/custom_metadata_test.go +++ b/sdk/helper/custommetadata/custom_metadata_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package custommetadata import ( diff --git a/sdk/helper/dbtxn/dbtxn.go b/sdk/helper/dbtxn/dbtxn.go index 12288d5b37c827..133b360e73e825 100644 --- a/sdk/helper/dbtxn/dbtxn.go +++ b/sdk/helper/dbtxn/dbtxn.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package dbtxn import ( diff --git a/sdk/helper/errutil/error.go b/sdk/helper/errutil/error.go index 1866343b518350..0b95efb40e3a9c 100644 --- a/sdk/helper/errutil/error.go +++ b/sdk/helper/errutil/error.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package errutil // UserError represents an error generated due to invalid user input diff --git a/sdk/helper/hclutil/hcl.go b/sdk/helper/hclutil/hcl.go index a78d820087d434..0b120367d5a6ac 100644 --- a/sdk/helper/hclutil/hcl.go +++ b/sdk/helper/hclutil/hcl.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package hclutil import ( diff --git a/sdk/helper/identitytpl/templating.go b/sdk/helper/identitytpl/templating.go index 124a27c920c301..6d84df8241dedf 100644 --- a/sdk/helper/identitytpl/templating.go +++ b/sdk/helper/identitytpl/templating.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package identitytpl import ( diff --git a/sdk/helper/identitytpl/templating_test.go b/sdk/helper/identitytpl/templating_test.go index d17409e78ae66b..15bfc812387c65 100644 --- a/sdk/helper/identitytpl/templating_test.go +++ b/sdk/helper/identitytpl/templating_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package identitytpl import ( diff --git a/sdk/helper/jsonutil/json.go b/sdk/helper/jsonutil/json.go index 1abd9fafebdc82..c03a4f8c8d14c4 100644 --- a/sdk/helper/jsonutil/json.go +++ b/sdk/helper/jsonutil/json.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package jsonutil import ( diff --git a/sdk/helper/jsonutil/json_test.go b/sdk/helper/jsonutil/json_test.go index 10aabf1b93ea37..dd33f9bf179aa0 100644 --- a/sdk/helper/jsonutil/json_test.go +++ b/sdk/helper/jsonutil/json_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package jsonutil import ( diff --git a/sdk/helper/kdf/kdf.go b/sdk/helper/kdf/kdf.go index e9964ba28c4f72..9d3e0e858585e6 100644 --- a/sdk/helper/kdf/kdf.go +++ b/sdk/helper/kdf/kdf.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // This package is used to implement Key Derivation Functions (KDF) // based on the recommendations of NIST SP 800-108. These are useful // for generating unique-per-transaction keys, or situations in which diff --git a/sdk/helper/kdf/kdf_test.go b/sdk/helper/kdf/kdf_test.go index ed5c0a13d36b4c..2148257f357c25 100644 --- a/sdk/helper/kdf/kdf_test.go +++ b/sdk/helper/kdf/kdf_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kdf import ( diff --git a/sdk/helper/keysutil/cache.go b/sdk/helper/keysutil/cache.go index fb55091e40a8c3..7da9c202fa58b9 100644 --- a/sdk/helper/keysutil/cache.go +++ b/sdk/helper/keysutil/cache.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package keysutil type Cache interface { diff --git a/sdk/helper/keysutil/consts.go b/sdk/helper/keysutil/consts.go index b6842324232136..e6c657b9115e84 100644 --- a/sdk/helper/keysutil/consts.go +++ b/sdk/helper/keysutil/consts.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( diff --git a/sdk/helper/keysutil/encrypted_key_storage.go b/sdk/helper/keysutil/encrypted_key_storage.go index 7314758bc16794..90eaaf0bbae1f6 100644 --- a/sdk/helper/keysutil/encrypted_key_storage.go +++ b/sdk/helper/keysutil/encrypted_key_storage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( diff --git a/sdk/helper/keysutil/encrypted_key_storage_test.go b/sdk/helper/keysutil/encrypted_key_storage_test.go index 5147027fc8839e..2f29d14b7ad7fc 100644 --- a/sdk/helper/keysutil/encrypted_key_storage_test.go +++ b/sdk/helper/keysutil/encrypted_key_storage_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( diff --git a/sdk/helper/keysutil/lock_manager.go b/sdk/helper/keysutil/lock_manager.go index 6d2881e0d8daa4..03708e8f840ae0 100644 --- a/sdk/helper/keysutil/lock_manager.go +++ b/sdk/helper/keysutil/lock_manager.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( @@ -63,9 +60,6 @@ type PolicyRequest struct { // AllowImportedKeyRotation indicates whether an imported key may be rotated by Vault AllowImportedKeyRotation bool - // Indicates whether a private or public key is imported/upserted - IsPrivateKey bool - // The UUID of the managed key, if using one ManagedKeyUUID string } @@ -514,7 +508,7 @@ func (lm *LockManager) ImportPolicy(ctx context.Context, req PolicyRequest, key } } - err = p.ImportPublicOrPrivate(ctx, req.Storage, key, req.IsPrivateKey, rand) + err = p.Import(ctx, req.Storage, key, rand) if err != nil { return fmt.Errorf("error importing key: %s", err) } diff --git a/sdk/helper/keysutil/managed_key_util.go b/sdk/helper/keysutil/managed_key_util.go index bb3c0b2968b737..6647218bf6cd13 100644 --- a/sdk/helper/keysutil/managed_key_util.go +++ b/sdk/helper/keysutil/managed_key_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build !enterprise package keysutil diff --git a/sdk/helper/keysutil/policy.go b/sdk/helper/keysutil/policy.go index 869733b3e9d4eb..361e21fa21b3ed 100644 --- a/sdk/helper/keysutil/policy.go +++ b/sdk/helper/keysutil/policy.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( @@ -22,7 +19,6 @@ import ( "encoding/pem" "errors" "fmt" - "hash" "io" "math/big" "path" @@ -42,8 +38,6 @@ import ( "github.com/hashicorp/vault/sdk/helper/jsonutil" "github.com/hashicorp/vault/sdk/helper/kdf" "github.com/hashicorp/vault/sdk/logical" - - "github.com/google/tink/go/kwp/subtle" ) // Careful with iota; don't put anything before it in this const block because @@ -170,14 +164,6 @@ func (kt KeyType) AssociatedDataSupported() bool { return false } -func (kt KeyType) ImportPublicKeySupported() bool { - switch kt { - case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096, KeyType_ECDSA_P256, KeyType_ECDSA_P384, KeyType_ECDSA_P521, KeyType_ED25519: - return true - } - return false -} - func (kt KeyType) String() string { switch kt { case KeyType_AES128_GCM96: @@ -229,8 +215,7 @@ type KeyEntry struct { EC_Y *big.Int `json:"ec_y"` EC_D *big.Int `json:"ec_d"` - RSAKey *rsa.PrivateKey `json:"rsa_key"` - RSAPublicKey *rsa.PublicKey `json:"rsa_public_key"` + RSAKey *rsa.PrivateKey `json:"rsa_key"` // The public key in an appropriate format for the type of key FormattedPublicKey string `json:"public_key"` @@ -246,14 +231,6 @@ type KeyEntry struct { ManagedKeyUUID string `json:"managed_key_id,omitempty"` } -func (ke *KeyEntry) IsPrivateKeyMissing() bool { - if ke.RSAKey != nil || ke.EC_D != nil || len(ke.Key) != 0 { - return false - } - - return true -} - // deprecatedKeyEntryMap is used to allow JSON marshal/unmarshal type deprecatedKeyEntryMap map[int]KeyEntry @@ -355,19 +332,6 @@ func LoadPolicy(ctx context.Context, s logical.Storage, path string) (*Policy, e return nil, err } - // Migrate RSA private keys to include their private counterpart. This lets - // us reference RSAPublicKey whenever we need to, without necessarily - // needing the private key handy, synchronizing the behavior with EC and - // Ed25519 key pairs. 
- switch policy.Type { - case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096: - for _, entry := range policy.Keys { - if entry.RSAPublicKey == nil && entry.RSAKey != nil { - entry.RSAPublicKey = entry.RSAKey.Public().(*rsa.PublicKey) - } - } - } - policy.l = new(sync.RWMutex) return &policy, nil @@ -1006,9 +970,6 @@ func (p *Policy) DecryptWithFactory(context, nonce []byte, value string, factori return "", err } key := keyEntry.RSAKey - if key == nil { - return "", errutil.InternalError{Err: fmt.Sprintf("cannot decrypt ciphertext, key version does not have a private counterpart")} - } plain, err = rsa.DecryptOAEP(sha256.New(), rand.Reader, key, decoded, nil) if err != nil { return "", errutil.InternalError{Err: fmt.Sprintf("failed to RSA decrypt the ciphertext: %v", err)} @@ -1083,13 +1044,13 @@ func (p *Policy) minRSAPSSSaltLength() int { return rsa.PSSSaltLengthEqualsHash } -func (p *Policy) maxRSAPSSSaltLength(keyBitLen int, hash crypto.Hash) int { +func (p *Policy) maxRSAPSSSaltLength(priv *rsa.PrivateKey, hash crypto.Hash) int { // https://cs.opensource.google/go/go/+/refs/tags/go1.19:src/crypto/rsa/pss.go;l=288 - return (keyBitLen-1+7)/8 - 2 - hash.Size() + return (priv.N.BitLen()-1+7)/8 - 2 - hash.Size() } -func (p *Policy) validRSAPSSSaltLength(keyBitLen int, hash crypto.Hash, saltLength int) bool { - return p.minRSAPSSSaltLength() <= saltLength && saltLength <= p.maxRSAPSSSaltLength(keyBitLen, hash) +func (p *Policy) validRSAPSSSaltLength(priv *rsa.PrivateKey, hash crypto.Hash, saltLength int) bool { + return p.minRSAPSSSaltLength() <= saltLength && saltLength <= p.maxRSAPSSSaltLength(priv, hash) } func (p *Policy) SignWithOptions(ver int, context, input []byte, options *SigningOptions) (*SigningResult, error) { @@ -1116,11 +1077,6 @@ func (p *Policy) SignWithOptions(ver int, context, input []byte, options *Signin return nil, err } - // Before signing, check if key has its private part, if not return error - if keyParams.IsPrivateKeyMissing() { - return nil, errutil.UserError{Err: "requested version for signing does not contain a private part"} - } - hashAlgorithm := options.HashAlgorithm marshaling := options.Marshaling saltLength := options.SaltLength @@ -1227,7 +1183,7 @@ func (p *Policy) SignWithOptions(ver int, context, input []byte, options *Signin switch sigAlgorithm { case "pss": - if !p.validRSAPSSSaltLength(key.N.BitLen(), algo, saltLength) { + if !p.validRSAPSSSaltLength(key, algo, saltLength) { return nil, errutil.UserError{Err: fmt.Sprintf("requested salt length %d is invalid", saltLength)} } sig, err = rsa.SignPSS(rand.Reader, key, algo, input, &rsa.PSSOptions{SaltLength: saltLength}) @@ -1381,30 +1337,20 @@ func (p *Policy) VerifySignatureWithOptions(context, input []byte, sig string, o return ecdsa.Verify(key, input, ecdsaSig.R, ecdsaSig.S), nil case KeyType_ED25519: - var pub ed25519.PublicKey + var key ed25519.PrivateKey if p.Derived { // Derive the key that should be used - key, err := p.GetKey(context, ver, 32) + var err error + key, err = p.GetKey(context, ver, 32) if err != nil { return false, errutil.InternalError{Err: fmt.Sprintf("error deriving key: %v", err)} } - pub = ed25519.PrivateKey(key).Public().(ed25519.PublicKey) } else { - keyEntry, err := p.safeGetKeyEntry(ver) - if err != nil { - return false, err - } - - raw, err := base64.StdEncoding.DecodeString(keyEntry.FormattedPublicKey) - if err != nil { - return false, err - } - - pub = ed25519.PublicKey(raw) + key = ed25519.PrivateKey(p.Keys[strconv.Itoa(ver)].Key) } - return ed25519.Verify(pub, 
input, sigBytes), nil + return ed25519.Verify(key.Public().(ed25519.PublicKey), input, sigBytes), nil case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096: keyEntry, err := p.safeGetKeyEntry(ver) @@ -1412,6 +1358,8 @@ func (p *Policy) VerifySignatureWithOptions(context, input []byte, sig string, o return false, err } + key := keyEntry.RSAKey + algo, ok := CryptoHashMap[hashAlgorithm] if !ok { return false, errutil.InternalError{Err: "unsupported hash algorithm"} @@ -1423,20 +1371,12 @@ func (p *Policy) VerifySignatureWithOptions(context, input []byte, sig string, o switch sigAlgorithm { case "pss": - publicKey := keyEntry.RSAPublicKey - if !keyEntry.IsPrivateKeyMissing() { - publicKey = &keyEntry.RSAKey.PublicKey - } - if !p.validRSAPSSSaltLength(publicKey.N.BitLen(), algo, saltLength) { + if !p.validRSAPSSSaltLength(key, algo, saltLength) { return false, errutil.UserError{Err: fmt.Sprintf("requested salt length %d is invalid", saltLength)} } - err = rsa.VerifyPSS(publicKey, algo, input, sigBytes, &rsa.PSSOptions{SaltLength: saltLength}) + err = rsa.VerifyPSS(&key.PublicKey, algo, input, sigBytes, &rsa.PSSOptions{SaltLength: saltLength}) case "pkcs1v15": - publicKey := keyEntry.RSAPublicKey - if !keyEntry.IsPrivateKeyMissing() { - publicKey = &keyEntry.RSAKey.PublicKey - } - err = rsa.VerifyPKCS1v15(publicKey, algo, input, sigBytes) + err = rsa.VerifyPKCS1v15(&key.PublicKey, algo, input, sigBytes) default: return false, errutil.InternalError{Err: fmt.Sprintf("unsupported rsa signature algorithm %s", sigAlgorithm)} } @@ -1457,28 +1397,12 @@ func (p *Policy) VerifySignatureWithOptions(context, input []byte, sig string, o } func (p *Policy) Import(ctx context.Context, storage logical.Storage, key []byte, randReader io.Reader) error { - return p.ImportPublicOrPrivate(ctx, storage, key, true, randReader) -} - -func (p *Policy) ImportPublicOrPrivate(ctx context.Context, storage logical.Storage, key []byte, isPrivateKey bool, randReader io.Reader) error { now := time.Now() entry := KeyEntry{ CreationTime: now, DeprecatedCreationTime: now.Unix(), } - // Before we insert this entry, check if the latest version is incomplete - // and this entry matches the current version; if so, return without - // updating to the next version. 
- if p.LatestVersion > 0 { - latestKey := p.Keys[strconv.Itoa(p.LatestVersion)] - if latestKey.IsPrivateKeyMissing() && isPrivateKey { - if err := p.ImportPrivateKeyForVersion(ctx, storage, p.LatestVersion, key); err == nil { - return nil - } - } - } - if p.Type != KeyType_HMAC { hmacKey, err := uuid.GenerateRandomBytesWithReader(32, randReader) if err != nil { @@ -1487,10 +1411,6 @@ func (p *Policy) ImportPublicOrPrivate(ctx context.Context, storage logical.Stor entry.HMACKey = hmacKey } - if p.Type == KeyType_ED25519 && p.Derived && !isPrivateKey { - return fmt.Errorf("unable to import only public key for derived Ed25519 key: imported key should not be an Ed25519 key pair but is instead an HKDF key") - } - if (p.Type == KeyType_AES128_GCM96 && len(key) != 16) || ((p.Type == KeyType_AES256_GCM96 || p.Type == KeyType_ChaCha20_Poly1305) && len(key) != 32) || (p.Type == KeyType_HMAC && (len(key) < HmacMinKeySize || len(key) > HmacMaxKeySize)) { @@ -1504,42 +1424,83 @@ func (p *Policy) ImportPublicOrPrivate(ctx context.Context, storage logical.Stor entry.HMACKey = key } } else { - var parsedKey any - var err error - if isPrivateKey { - parsedKey, err = x509.ParsePKCS8PrivateKey(key) - if err != nil { - if strings.Contains(err.Error(), "unknown elliptic curve") { - var edErr error - parsedKey, edErr = ParsePKCS8Ed25519PrivateKey(key) - if edErr != nil { - return fmt.Errorf("error parsing asymmetric key:\n - assuming contents are an ed25519 private key: %s\n - original error: %v", edErr, err) - } - - // Parsing as Ed25519-in-PKCS8-ECPrivateKey succeeded! - } else if strings.Contains(err.Error(), oidSignatureRSAPSS.String()) { - var rsaErr error - parsedKey, rsaErr = ParsePKCS8RSAPSSPrivateKey(key) - if rsaErr != nil { - return fmt.Errorf("error parsing asymmetric key:\n - assuming contents are an RSA/PSS private key: %v\n - original error: %w", rsaErr, err) - } - - // Parsing as RSA-PSS in PKCS8 succeeded! - } else { - return fmt.Errorf("error parsing asymmetric key: %s", err) + parsedPrivateKey, err := x509.ParsePKCS8PrivateKey(key) + if err != nil { + if strings.Contains(err.Error(), "unknown elliptic curve") { + var edErr error + parsedPrivateKey, edErr = ParsePKCS8Ed25519PrivateKey(key) + if edErr != nil { + return fmt.Errorf("error parsing asymmetric key:\n - assuming contents are an ed25519 private key: %s\n - original error: %v", edErr, err) } + + // Parsing as Ed25519-in-PKCS8-ECPrivateKey succeeded! 
+ } else { + return fmt.Errorf("error parsing asymmetric key: %s", err) } - } else { - pemBlock, _ := pem.Decode(key) - parsedKey, err = x509.ParsePKIXPublicKey(pemBlock.Bytes) + } + + switch parsedPrivateKey.(type) { + case *ecdsa.PrivateKey: + if p.Type != KeyType_ECDSA_P256 && p.Type != KeyType_ECDSA_P384 && p.Type != KeyType_ECDSA_P521 { + return fmt.Errorf("invalid key type: expected %s, got %T", p.Type, parsedPrivateKey) + } + + ecdsaKey := parsedPrivateKey.(*ecdsa.PrivateKey) + curve := elliptic.P256() + if p.Type == KeyType_ECDSA_P384 { + curve = elliptic.P384() + } else if p.Type == KeyType_ECDSA_P521 { + curve = elliptic.P521() + } + + if ecdsaKey.Curve != curve { + return fmt.Errorf("invalid curve: expected %s, got %s", curve.Params().Name, ecdsaKey.Curve.Params().Name) + } + + entry.EC_D = ecdsaKey.D + entry.EC_X = ecdsaKey.X + entry.EC_Y = ecdsaKey.Y + derBytes, err := x509.MarshalPKIXPublicKey(ecdsaKey.Public()) if err != nil { - return fmt.Errorf("error parsing public key: %s", err) + return errwrap.Wrapf("error marshaling public key: {{err}}", err) } - } + pemBlock := &pem.Block{ + Type: "PUBLIC KEY", + Bytes: derBytes, + } + pemBytes := pem.EncodeToMemory(pemBlock) + if pemBytes == nil || len(pemBytes) == 0 { + return fmt.Errorf("error PEM-encoding public key") + } + entry.FormattedPublicKey = string(pemBytes) + case ed25519.PrivateKey: + if p.Type != KeyType_ED25519 { + return fmt.Errorf("invalid key type: expected %s, got %T", p.Type, parsedPrivateKey) + } + privateKey := parsedPrivateKey.(ed25519.PrivateKey) - err = entry.parseFromKey(p.Type, parsedKey) - if err != nil { - return err + entry.Key = privateKey + publicKey := privateKey.Public().(ed25519.PublicKey) + entry.FormattedPublicKey = base64.StdEncoding.EncodeToString(publicKey) + case *rsa.PrivateKey: + if p.Type != KeyType_RSA2048 && p.Type != KeyType_RSA3072 && p.Type != KeyType_RSA4096 { + return fmt.Errorf("invalid key type: expected %s, got %T", p.Type, parsedPrivateKey) + } + + keyBytes := 256 + if p.Type == KeyType_RSA3072 { + keyBytes = 384 + } else if p.Type == KeyType_RSA4096 { + keyBytes = 512 + } + rsaKey := parsedPrivateKey.(*rsa.PrivateKey) + if rsaKey.Size() != keyBytes { + return fmt.Errorf("invalid key size: expected %d bytes, got %d bytes", keyBytes, rsaKey.Size()) + } + + entry.RSAKey = rsaKey + default: + return fmt.Errorf("invalid key type: expected %s, got %T", p.Type, parsedPrivateKey) } } @@ -1667,19 +1628,13 @@ func (p *Policy) RotateInMemory(randReader io.Reader) (retErr error) { entry.FormattedPublicKey = string(pemBytes) case KeyType_ED25519: - // Go uses a 64-byte private key for Ed25519 keys (private+public, each - // 32-bytes long). When we do Key derivation, we still generate a 32-byte - // random value (and compute the corresponding Ed25519 public key), but - // use this entire 64-byte key as if it was an HKDF key. The corresponding - // underlying public key is never returned (which is probably good, because - // doing so would leak half of our HKDF key...), but means we cannot import - // derived-enabled Ed25519 public key components. 
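The removed RotateInMemory comment above describes how Go lays out Ed25519 keys. A small standard-library sketch (illustrative only) showing that the private key is 64 bytes, i.e. a 32-byte seed followed by the 32-byte public key, which is the blob the policy stores and, when derivation is enabled, treats as HKDF input:

package main

import (
	"bytes"
	"crypto/ed25519"
	"crypto/rand"
	"fmt"
)

func main() {
	pub, priv, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}

	// The private key is seed || public key, 64 bytes in total.
	fmt.Println(len(priv) == ed25519.PrivateKeySize)       // 64 bytes -> true
	fmt.Println(len(priv.Seed()) == ed25519.SeedSize)      // 32-byte seed -> true
	fmt.Println(bytes.Equal(priv[ed25519.SeedSize:], pub)) // trailing half is the public key -> true

	// Signing uses the whole 64-byte value.
	sig := ed25519.Sign(priv, []byte("message"))
	fmt.Println(ed25519.Verify(pub, []byte("message"), sig)) // true
}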
pub, pri, err := ed25519.GenerateKey(randReader) if err != nil { return err } entry.Key = pri entry.FormattedPublicKey = base64.StdEncoding.EncodeToString(pub) + case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096: bitSize := 2048 if p.Type == KeyType_RSA3072 { @@ -2065,13 +2020,8 @@ func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value if err != nil { return "", err } - var publicKey *rsa.PublicKey - if keyEntry.RSAKey != nil { - publicKey = &keyEntry.RSAKey.PublicKey - } else { - publicKey = keyEntry.RSAPublicKey - } - ciphertext, err = rsa.EncryptOAEP(sha256.New(), rand.Reader, publicKey, plaintext, nil) + key := keyEntry.RSAKey + ciphertext, err = rsa.EncryptOAEP(sha256.New(), rand.Reader, &key.PublicKey, plaintext, nil) if err != nil { return "", errutil.InternalError{Err: fmt.Sprintf("failed to RSA encrypt the plaintext: %v", err)} } @@ -2116,280 +2066,3 @@ func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value return encoded, nil } - -func (p *Policy) KeyVersionCanBeUpdated(keyVersion int, isPrivateKey bool) error { - keyEntry, err := p.safeGetKeyEntry(keyVersion) - if err != nil { - return err - } - - if !p.Type.ImportPublicKeySupported() { - return errors.New("provided type does not support importing key versions") - } - - isPrivateKeyMissing := keyEntry.IsPrivateKeyMissing() - if isPrivateKeyMissing && !isPrivateKey { - return errors.New("cannot add a public key to a key version that already has a public key set") - } - - if !isPrivateKeyMissing { - return errors.New("private key imported, key version cannot be updated") - } - - return nil -} - -func (p *Policy) ImportPrivateKeyForVersion(ctx context.Context, storage logical.Storage, keyVersion int, key []byte) error { - keyEntry, err := p.safeGetKeyEntry(keyVersion) - if err != nil { - return err - } - - // Parse key - parsedPrivateKey, err := x509.ParsePKCS8PrivateKey(key) - if err != nil { - if strings.Contains(err.Error(), "unknown elliptic curve") { - var edErr error - parsedPrivateKey, edErr = ParsePKCS8Ed25519PrivateKey(key) - if edErr != nil { - return fmt.Errorf("error parsing asymmetric key:\n - assuming contents are an ed25519 private key: %s\n - original error: %v", edErr, err) - } - - // Parsing as Ed25519-in-PKCS8-ECPrivateKey succeeded! - } else if strings.Contains(err.Error(), oidSignatureRSAPSS.String()) { - var rsaErr error - parsedPrivateKey, rsaErr = ParsePKCS8RSAPSSPrivateKey(key) - if rsaErr != nil { - return fmt.Errorf("error parsing asymmetric key:\n - assuming contents are an RSA/PSS private key: %v\n - original error: %w", rsaErr, err) - } - - // Parsing as RSA-PSS in PKCS8 succeeded! 
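The EncryptWithFactory hunk above now encrypts against key.PublicKey directly. For background, a minimal sketch of the underlying RSA-OAEP round trip with SHA-256 and an empty label; the key size and plaintext are illustrative:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"fmt"
)

func main() {
	priv, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}

	plaintext := []byte("secret payload")

	// Encrypt to the public half with OAEP/SHA-256 and a nil label,
	// mirroring the RSA path in the transit policy.
	ciphertext, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, &priv.PublicKey, plaintext, nil)
	if err != nil {
		panic(err)
	}

	// Decryption requires the private half.
	recovered, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, priv, ciphertext, nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(recovered))
}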
- } else { - return fmt.Errorf("error parsing asymmetric key: %s", err) - } - } - - switch parsedPrivateKey.(type) { - case *ecdsa.PrivateKey: - ecdsaKey := parsedPrivateKey.(*ecdsa.PrivateKey) - pemBlock, _ := pem.Decode([]byte(keyEntry.FormattedPublicKey)) - publicKey, err := x509.ParsePKIXPublicKey(pemBlock.Bytes) - if err != nil || publicKey == nil { - return fmt.Errorf("failed to parse key entry public key: %v", err) - } - if !publicKey.(*ecdsa.PublicKey).Equal(&ecdsaKey.PublicKey) { - return fmt.Errorf("cannot import key, key pair does not match") - } - case *rsa.PrivateKey: - rsaKey := parsedPrivateKey.(*rsa.PrivateKey) - if !rsaKey.PublicKey.Equal(keyEntry.RSAPublicKey) { - return fmt.Errorf("cannot import key, key pair does not match") - } - case ed25519.PrivateKey: - ed25519Key := parsedPrivateKey.(ed25519.PrivateKey) - publicKey, err := base64.StdEncoding.DecodeString(keyEntry.FormattedPublicKey) - if err != nil { - return fmt.Errorf("failed to parse key entry public key: %v", err) - } - if !ed25519.PublicKey(publicKey).Equal(ed25519Key.Public()) { - return fmt.Errorf("cannot import key, key pair does not match") - } - } - - err = keyEntry.parseFromKey(p.Type, parsedPrivateKey) - if err != nil { - return err - } - - p.Keys[strconv.Itoa(keyVersion)] = keyEntry - - return p.Persist(ctx, storage) -} - -func (ke *KeyEntry) parseFromKey(PolKeyType KeyType, parsedKey any) error { - switch parsedKey.(type) { - case *ecdsa.PrivateKey, *ecdsa.PublicKey: - if PolKeyType != KeyType_ECDSA_P256 && PolKeyType != KeyType_ECDSA_P384 && PolKeyType != KeyType_ECDSA_P521 { - return fmt.Errorf("invalid key type: expected %s, got %T", PolKeyType, parsedKey) - } - - curve := elliptic.P256() - if PolKeyType == KeyType_ECDSA_P384 { - curve = elliptic.P384() - } else if PolKeyType == KeyType_ECDSA_P521 { - curve = elliptic.P521() - } - - var derBytes []byte - var err error - ecdsaKey, ok := parsedKey.(*ecdsa.PrivateKey) - if ok { - - if ecdsaKey.Curve != curve { - return fmt.Errorf("invalid curve: expected %s, got %s", curve.Params().Name, ecdsaKey.Curve.Params().Name) - } - - ke.EC_D = ecdsaKey.D - ke.EC_X = ecdsaKey.X - ke.EC_Y = ecdsaKey.Y - - derBytes, err = x509.MarshalPKIXPublicKey(ecdsaKey.Public()) - if err != nil { - return errwrap.Wrapf("error marshaling public key: {{err}}", err) - } - } else { - ecdsaKey := parsedKey.(*ecdsa.PublicKey) - - if ecdsaKey.Curve != curve { - return fmt.Errorf("invalid curve: expected %s, got %s", curve.Params().Name, ecdsaKey.Curve.Params().Name) - } - - ke.EC_X = ecdsaKey.X - ke.EC_Y = ecdsaKey.Y - - derBytes, err = x509.MarshalPKIXPublicKey(ecdsaKey) - if err != nil { - return errwrap.Wrapf("error marshaling public key: {{err}}", err) - } - } - - pemBlock := &pem.Block{ - Type: "PUBLIC KEY", - Bytes: derBytes, - } - pemBytes := pem.EncodeToMemory(pemBlock) - if pemBytes == nil || len(pemBytes) == 0 { - return fmt.Errorf("error PEM-encoding public key") - } - ke.FormattedPublicKey = string(pemBytes) - case ed25519.PrivateKey, ed25519.PublicKey: - if PolKeyType != KeyType_ED25519 { - return fmt.Errorf("invalid key type: expected %s, got %T", PolKeyType, parsedKey) - } - - privateKey, ok := parsedKey.(ed25519.PrivateKey) - if ok { - ke.Key = privateKey - publicKey := privateKey.Public().(ed25519.PublicKey) - ke.FormattedPublicKey = base64.StdEncoding.EncodeToString(publicKey) - } else { - publicKey := parsedKey.(ed25519.PublicKey) - ke.FormattedPublicKey = base64.StdEncoding.EncodeToString(publicKey) - } - case *rsa.PrivateKey, *rsa.PublicKey: - if PolKeyType != 
KeyType_RSA2048 && PolKeyType != KeyType_RSA3072 && PolKeyType != KeyType_RSA4096 { - return fmt.Errorf("invalid key type: expected %s, got %T", PolKeyType, parsedKey) - } - - keyBytes := 256 - if PolKeyType == KeyType_RSA3072 { - keyBytes = 384 - } else if PolKeyType == KeyType_RSA4096 { - keyBytes = 512 - } - - rsaKey, ok := parsedKey.(*rsa.PrivateKey) - if ok { - if rsaKey.Size() != keyBytes { - return fmt.Errorf("invalid key size: expected %d bytes, got %d bytes", keyBytes, rsaKey.Size()) - } - ke.RSAKey = rsaKey - ke.RSAPublicKey = rsaKey.Public().(*rsa.PublicKey) - } else { - rsaKey := parsedKey.(*rsa.PublicKey) - if rsaKey.Size() != keyBytes { - return fmt.Errorf("invalid key size: expected %d bytes, got %d bytes", keyBytes, rsaKey.Size()) - } - ke.RSAPublicKey = rsaKey - } - default: - return fmt.Errorf("invalid key type: expected %s, got %T", PolKeyType, parsedKey) - } - - return nil -} - -func (p *Policy) WrapKey(ver int, targetKey interface{}, targetKeyType KeyType, hash hash.Hash) (string, error) { - if !p.Type.SigningSupported() { - return "", fmt.Errorf("message signing not supported for key type %v", p.Type) - } - - switch { - case ver == 0: - ver = p.LatestVersion - case ver < 0: - return "", errutil.UserError{Err: "requested version for key wrapping is negative"} - case ver > p.LatestVersion: - return "", errutil.UserError{Err: "requested version for key wrapping is higher than the latest key version"} - case p.MinEncryptionVersion > 0 && ver < p.MinEncryptionVersion: - return "", errutil.UserError{Err: "requested version for key wrapping is less than the minimum encryption key version"} - } - - keyEntry, err := p.safeGetKeyEntry(ver) - if err != nil { - return "", err - } - - return keyEntry.WrapKey(targetKey, targetKeyType, hash) -} - -func (ke *KeyEntry) WrapKey(targetKey interface{}, targetKeyType KeyType, hash hash.Hash) (string, error) { - // Presently this method implements a CKM_RSA_AES_KEY_WRAP-compatible - // wrapping interface and only works on RSA keyEntries as a result. 
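The removed WrapKey/wrapTargetPKCS8ForImport helpers whose bodies follow implement a CKM_RSA_AES_KEY_WRAP-compatible flow. A compact sketch of that scheme is below: generate an ephemeral AES-256 key, RSA-OAEP-wrap it, AES-KWP-wrap the target key, then concatenate and base64-encode. It uses the same tink kwp/subtle dependency the removed code imports; the function and variable names here are illustrative.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"encoding/base64"
	"fmt"

	"github.com/google/tink/go/kwp/subtle"
)

// wrapForImport wraps targetKey (raw symmetric bytes or a marshaled PKCS#8
// blob) under the given RSA wrapping key.
func wrapForImport(wrappingKey *rsa.PublicKey, targetKey []byte) (string, error) {
	// 1. Generate an ephemeral AES-256 key.
	ephKey := make([]byte, 32)
	if _, err := rand.Read(ephKey); err != nil {
		return "", err
	}

	// 2. Wrap the ephemeral key with the RSA public key (OAEP, empty label).
	ephWrapped, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, wrappingKey, ephKey, nil)
	if err != nil {
		return "", err
	}

	// 3. Wrap the target key with AES key wrap with padding (KWP).
	kwp, err := subtle.NewKWP(ephKey)
	if err != nil {
		return "", err
	}
	targetWrapped, err := kwp.Wrap(targetKey)
	if err != nil {
		return "", err
	}

	// 4. Concatenate the two blobs and base64-encode the result.
	return base64.StdEncoding.EncodeToString(append(ephWrapped, targetWrapped...)), nil
}

func main() {
	rsaKey, _ := rsa.GenerateKey(rand.Reader, 2048)
	out, err := wrapForImport(&rsaKey.PublicKey, []byte("0123456789abcdef0123456789abcdef"))
	if err != nil {
		panic(err)
	}
	fmt.Println(out)
}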
- if ke.RSAPublicKey == nil { - return "", fmt.Errorf("unsupported key type in use; must be a rsa key") - } - - var preppedTargetKey []byte - switch targetKeyType { - case KeyType_AES128_GCM96, KeyType_AES256_GCM96, KeyType_ChaCha20_Poly1305, KeyType_HMAC: - var ok bool - preppedTargetKey, ok = targetKey.([]byte) - if !ok { - return "", fmt.Errorf("failed to wrap target key for import: symmetric key not provided in byte format (%T)", targetKey) - } - default: - var err error - preppedTargetKey, err = x509.MarshalPKCS8PrivateKey(targetKey) - if err != nil { - return "", fmt.Errorf("failed to wrap target key for import: %w", err) - } - } - - result, err := wrapTargetPKCS8ForImport(ke.RSAPublicKey, preppedTargetKey, hash) - if err != nil { - return result, fmt.Errorf("failed to wrap target key for import: %w", err) - } - - return result, nil -} - -func wrapTargetPKCS8ForImport(wrappingKey *rsa.PublicKey, preppedTargetKey []byte, hash hash.Hash) (string, error) { - // Generate an ephemeral AES-256 key - ephKey, err := uuid.GenerateRandomBytes(32) - if err != nil { - return "", fmt.Errorf("failed to generate an ephemeral AES wrapping key: %w", err) - } - - // Wrap ephemeral AES key with public wrapping key - ephKeyWrapped, err := rsa.EncryptOAEP(hash, rand.Reader, wrappingKey, ephKey, []byte{} /* label */) - if err != nil { - return "", fmt.Errorf("failed to encrypt ephemeral wrapping key with public key: %w", err) - } - - // Create KWP instance for wrapping target key - kwp, err := subtle.NewKWP(ephKey) - if err != nil { - return "", fmt.Errorf("failed to generate new KWP from AES key: %w", err) - } - - // Wrap target key with KWP - targetKeyWrapped, err := kwp.Wrap(preppedTargetKey) - if err != nil { - return "", fmt.Errorf("failed to wrap target key with KWP: %w", err) - } - - // Combined wrapped keys into a single blob and base64 encode - wrappedKeys := append(ephKeyWrapped, targetKeyWrapped...) - return base64.StdEncoding.EncodeToString(wrappedKeys), nil -} diff --git a/sdk/helper/keysutil/policy_test.go b/sdk/helper/keysutil/policy_test.go index f5e4d35eb81c42..2df73971a2b1c2 100644 --- a/sdk/helper/keysutil/policy_test.go +++ b/sdk/helper/keysutil/policy_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( @@ -846,7 +843,7 @@ func Test_RSA_PSS(t *testing.T) { } cryptoHash := CryptoHashMap[hashType] minSaltLength := p.minRSAPSSSaltLength() - maxSaltLength := p.maxRSAPSSSaltLength(rsaKey.N.BitLen(), cryptoHash) + maxSaltLength := p.maxRSAPSSSaltLength(rsaKey, cryptoHash) hash := cryptoHash.New() hash.Write(input) input = hash.Sum(nil) diff --git a/sdk/helper/keysutil/transit_lru.go b/sdk/helper/keysutil/transit_lru.go index 66ea66dc74e238..cd1f6dafe693e2 100644 --- a/sdk/helper/keysutil/transit_lru.go +++ b/sdk/helper/keysutil/transit_lru.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import lru "github.com/hashicorp/golang-lru" diff --git a/sdk/helper/keysutil/transit_syncmap.go b/sdk/helper/keysutil/transit_syncmap.go index fddcf706b2f126..ce9071380a9961 100644 --- a/sdk/helper/keysutil/transit_syncmap.go +++ b/sdk/helper/keysutil/transit_syncmap.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( diff --git a/sdk/helper/keysutil/util.go b/sdk/helper/keysutil/util.go index 94a56d42c5735c..063af5914672dd 100644 --- a/sdk/helper/keysutil/util.go +++ b/sdk/helper/keysutil/util.go @@ -1,10 +1,6 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package keysutil import ( - "crypto/x509" "crypto/x509/pkix" "encoding/asn1" "errors" @@ -57,9 +53,6 @@ var ( // Other implementations may use the OID 1.3.101.110 from // https://datatracker.ietf.org/doc/html/rfc8410. oidRFC8410Ed25519 = asn1.ObjectIdentifier{1, 3, 101, 110} - - // See crypto/x509/x509.go in the Go toolchain source distribution. - oidSignatureRSAPSS = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 10} ) func isEd25519OID(oid asn1.ObjectIdentifier) bool { @@ -120,32 +113,3 @@ func ParsePKCS8Ed25519PrivateKey(der []byte) (key interface{}, err error) { return ed25519.NewKeyFromSeed(ed25519Key.PrivateKey), nil } - -// ParsePKCS8PrivateKey parses an unencrypted private key in PKCS #8, ASN.1 DER form. -// -// This helper only supports RSA/PSS keys (with OID 1.2.840.113549.1.1.10). -// -// It returns a *rsa.PrivateKey, a *ecdsa.PrivateKey, or a ed25519.PrivateKey. -// More types might be supported in the future. -// -// This kind of key is commonly encoded in PEM blocks of type "PRIVATE KEY". -func ParsePKCS8RSAPSSPrivateKey(der []byte) (key interface{}, err error) { - var privKey pkcs8 - if _, err := asn1.Unmarshal(der, &privKey); err == nil { - switch { - case privKey.Algo.Algorithm.Equal(oidSignatureRSAPSS): - // Fall through; there's no parameters here unlike ECDSA - // containers, so we can go to parsing the inner rsaPrivateKey - // object. - default: - return nil, errors.New("keysutil: failed to parse key as RSA PSS private key") - } - } - - key, err = x509.ParsePKCS1PrivateKey(privKey.PrivateKey) - if err != nil { - return nil, fmt.Errorf("keysutil: failed to parse inner RSA PSS private key: %w", err) - } - - return key, nil -} diff --git a/sdk/helper/ldaputil/client.go b/sdk/helper/ldaputil/client.go index c3562a5d406dfa..bbdca9a4e6a8b6 100644 --- a/sdk/helper/ldaputil/client.go +++ b/sdk/helper/ldaputil/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldaputil import ( @@ -139,11 +136,10 @@ func (c *Client) makeLdapSearchRequest(cfg *ConfigEntry, conn Connection, userna c.Logger.Debug("discovering user", "userdn", cfg.UserDN, "filter", renderedFilter) } ldapRequest := &ldap.SearchRequest{ - BaseDN: cfg.UserDN, - DerefAliases: ldapDerefAliasMap[cfg.DerefAliases], - Scope: ldap.ScopeWholeSubtree, - Filter: renderedFilter, - SizeLimit: 2, // Should be only 1 result. Any number larger (2 or more) means access denied. + BaseDN: cfg.UserDN, + Scope: ldap.ScopeWholeSubtree, + Filter: renderedFilter, + SizeLimit: 2, // Should be only 1 result. Any number larger (2 or more) means access denied. Attributes: []string{ cfg.UserAttr, // Return only needed attributes }, @@ -230,10 +226,6 @@ func (c *Client) RenderUserSearchFilter(cfg *ConfigEntry, username string) (stri context.Username = fmt.Sprintf("%s@%s", EscapeLDAPValue(username), cfg.UPNDomain) } - // Execute the template. Note that the template context contains escaped input and does - // not provide behavior via functions. Additionally, no function map has been provided - // during template initialization. 
The only template functions available during execution - // are the predefined global functions: https://pkg.go.dev/text/template#hdr-Functions var renderedFilter bytes.Buffer if err := t.Execute(&renderedFilter, context); err != nil { return "", fmt.Errorf("LDAP search failed due to template parsing error: %w", err) @@ -299,11 +291,10 @@ func (c *Client) GetUserDN(cfg *ConfigEntry, conn Connection, bindDN, username s c.Logger.Debug("searching upn", "userdn", cfg.UserDN, "filter", filter) } result, err := conn.Search(&ldap.SearchRequest{ - BaseDN: cfg.UserDN, - Scope: ldap.ScopeWholeSubtree, - DerefAliases: ldapDerefAliasMap[cfg.DerefAliases], - Filter: filter, - SizeLimit: math.MaxInt32, + BaseDN: cfg.UserDN, + Scope: ldap.ScopeWholeSubtree, + Filter: filter, + SizeLimit: math.MaxInt32, }) if err != nil { return userDN, fmt.Errorf("LDAP search failed for detecting user: %w", err) @@ -361,10 +352,9 @@ func (c *Client) performLdapFilterGroupsSearch(cfg *ConfigEntry, conn Connection } result, err := conn.Search(&ldap.SearchRequest{ - BaseDN: cfg.GroupDN, - Scope: ldap.ScopeWholeSubtree, - DerefAliases: ldapDerefAliasMap[cfg.DerefAliases], - Filter: renderedQuery.String(), + BaseDN: cfg.GroupDN, + Scope: ldap.ScopeWholeSubtree, + Filter: renderedQuery.String(), Attributes: []string{ cfg.GroupAttr, }, @@ -410,10 +400,6 @@ func (c *Client) performLdapFilterGroupsSearchPaging(cfg *ConfigEntry, conn Pagi ldap.EscapeFilter(username), } - // Execute the template. Note that the template context contains escaped input and does - // not provide behavior via functions. Additionally, no function map has been provided - // during template initialization. The only template functions available during execution - // are the predefined global functions: https://pkg.go.dev/text/template#hdr-Functions var renderedQuery bytes.Buffer if err := t.Execute(&renderedQuery, context); err != nil { return nil, fmt.Errorf("LDAP search failed due to template parsing error: %w", err) @@ -424,10 +410,9 @@ func (c *Client) performLdapFilterGroupsSearchPaging(cfg *ConfigEntry, conn Pagi } result, err := conn.SearchWithPaging(&ldap.SearchRequest{ - BaseDN: cfg.GroupDN, - Scope: ldap.ScopeWholeSubtree, - DerefAliases: ldapDerefAliasMap[cfg.DerefAliases], - Filter: renderedQuery.String(), + BaseDN: cfg.GroupDN, + Scope: ldap.ScopeWholeSubtree, + Filter: renderedQuery.String(), Attributes: []string{ cfg.GroupAttr, }, @@ -474,10 +459,9 @@ func sidBytesToString(b []byte) (string, error) { func (c *Client) performLdapTokenGroupsSearch(cfg *ConfigEntry, conn Connection, userDN string) ([]*ldap.Entry, error) { result, err := conn.Search(&ldap.SearchRequest{ - BaseDN: userDN, - Scope: ldap.ScopeBaseObject, - DerefAliases: ldapDerefAliasMap[cfg.DerefAliases], - Filter: "(objectClass=*)", + BaseDN: userDN, + Scope: ldap.ScopeBaseObject, + Filter: "(objectClass=*)", Attributes: []string{ "tokenGroups", }, @@ -503,10 +487,9 @@ func (c *Client) performLdapTokenGroupsSearch(cfg *ConfigEntry, conn Connection, } groupResult, err := conn.Search(&ldap.SearchRequest{ - BaseDN: fmt.Sprintf("", sidString), - Scope: ldap.ScopeBaseObject, - DerefAliases: ldapDerefAliasMap[cfg.DerefAliases], - Filter: "(objectClass=*)", + BaseDN: fmt.Sprintf("", sidString), + Scope: ldap.ScopeBaseObject, + Filter: "(objectClass=*)", Attributes: []string{ "1.1", // RFC no attributes }, diff --git a/sdk/helper/ldaputil/client_test.go b/sdk/helper/ldaputil/client_test.go index 167d50f22d6282..c9ae9cd4baa5a9 100644 --- a/sdk/helper/ldaputil/client_test.go +++ 
b/sdk/helper/ldaputil/client_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldaputil import ( diff --git a/sdk/helper/ldaputil/config.go b/sdk/helper/ldaputil/config.go index dfa34daddfd08f..041ed0704ae055 100644 --- a/sdk/helper/ldaputil/config.go +++ b/sdk/helper/ldaputil/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldaputil import ( @@ -16,17 +13,8 @@ import ( "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/errwrap" - - "github.com/go-ldap/ldap/v3" ) -var ldapDerefAliasMap = map[string]int{ - "never": ldap.NeverDerefAliases, - "finding": ldap.DerefFindingBaseObj, - "searching": ldap.DerefInSearching, - "always": ldap.DerefAlways, -} - // ConfigFields returns all the config fields that can potentially be used by the LDAP client. // Not all fields will be used by every integration. func ConfigFields() map[string]*framework.FieldSchema { @@ -245,13 +233,6 @@ Default: ({{.UserAttr}}={{.Username}})`, Default: "30s", }, - "dereference_aliases": { - Type: framework.TypeString, - Description: "When aliases should be dereferenced on search operations. Accepted values are 'never', 'finding', 'searching', 'always'. Defaults to 'never'.", - Default: "never", - AllowedValues: []interface{}{"never", "finding", "searching", "always"}, - }, - "max_page_size": { Type: framework.TypeInt, Description: "If set to a value greater than 0, the LDAP backend will use the LDAP server's paged search control to request pages of up to the given size. This can be used to avoid hitting the LDAP server's maximum result size limit. Otherwise, the LDAP backend will not use the paged search control.", @@ -427,10 +408,6 @@ func NewConfigEntry(existing *ConfigEntry, d *framework.FieldData) (*ConfigEntry cfg.ConnectionTimeout = d.Get("connection_timeout").(int) } - if _, ok := d.Raw["dereference_aliases"]; ok || !hadExisting { - cfg.DerefAliases = d.Get("dereference_aliases").(string) - } - if _, ok := d.Raw["max_page_size"]; ok || !hadExisting { cfg.MaximumPageSize = d.Get("max_page_size").(int) } @@ -462,7 +439,6 @@ type ConfigEntry struct { UsePre111GroupCNBehavior *bool `json:"use_pre111_group_cn_behavior"` RequestTimeout int `json:"request_timeout"` ConnectionTimeout int `json:"connection_timeout"` - DerefAliases string `json:"dereference_aliases"` MaximumPageSize int `json:"max_page_size"` // These json tags deviate from snake case because there was a past issue @@ -503,7 +479,6 @@ func (c *ConfigEntry) PasswordlessMap() map[string]interface{} { "request_timeout": c.RequestTimeout, "connection_timeout": c.ConnectionTimeout, "username_as_alias": c.UsernameAsAlias, - "dereference_aliases": c.DerefAliases, "max_page_size": c.MaximumPageSize, } if c.CaseSensitiveNames != nil { diff --git a/sdk/helper/ldaputil/config_test.go b/sdk/helper/ldaputil/config_test.go index b7fd22ccbb2df4..c0a57253ce7dfb 100644 --- a/sdk/helper/ldaputil/config_test.go +++ b/sdk/helper/ldaputil/config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
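Since the default userfilter shown above is itself a text/template (`({{.UserAttr}}={{.Username}})`), here is a minimal sketch of how such a filter is rendered with pre-escaped input, in the spirit of the client code above. No custom FuncMap is registered, and the username is run through go-ldap's EscapeFilter before it enters the template context; the attribute and username values are illustrative.

package main

import (
	"bytes"
	"fmt"
	"text/template"

	"github.com/go-ldap/ldap/v3"
)

func main() {
	const userFilter = "({{.UserAttr}}={{.Username}})"

	// Only the predefined global template functions are available at
	// execution time; no function map is installed.
	t, err := template.New("userFilter").Parse(userFilter)
	if err != nil {
		panic(err)
	}

	// Escape the username before it reaches the template context, so
	// special filter characters cannot change the query structure.
	ctx := struct {
		UserAttr string
		Username string
	}{
		UserAttr: "cn",
		Username: ldap.EscapeFilter("alice)(objectClass=*"),
	}

	var rendered bytes.Buffer
	if err := t.Execute(&rendered, ctx); err != nil {
		panic(err)
	}
	fmt.Println(rendered.String()) // (cn=alice\29\28objectClass=\2a)
}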
-// SPDX-License-Identifier: MPL-2.0 - package ldaputil import ( @@ -174,7 +171,6 @@ var jsonConfigDefault = []byte(` "username_as_alias": false, "request_timeout": 90, "connection_timeout": 30, - "dereference_aliases": "never", "max_page_size": 0, "CaseSensitiveNames": false, "ClientTLSCert": "", diff --git a/sdk/helper/ldaputil/connection.go b/sdk/helper/ldaputil/connection.go index c33ad403f78e21..71c83f2f9b3a9f 100644 --- a/sdk/helper/ldaputil/connection.go +++ b/sdk/helper/ldaputil/connection.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldaputil import ( diff --git a/sdk/helper/ldaputil/ldap.go b/sdk/helper/ldaputil/ldap.go index bdf746e5c8cd4b..73e36b230dc0e0 100644 --- a/sdk/helper/ldaputil/ldap.go +++ b/sdk/helper/ldaputil/ldap.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package ldaputil import ( diff --git a/sdk/helper/license/feature.go b/sdk/helper/license/feature.go index b42fcd1fc1a196..c7c000a58a30da 100644 --- a/sdk/helper/license/feature.go +++ b/sdk/helper/license/feature.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package license // Features is a bitmask of feature flags diff --git a/sdk/helper/locksutil/locks.go b/sdk/helper/locksutil/locks.go index c7538b63b4f788..35ffcf739d9dc1 100644 --- a/sdk/helper/locksutil/locks.go +++ b/sdk/helper/locksutil/locks.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package locksutil import ( diff --git a/sdk/helper/locksutil/locks_test.go b/sdk/helper/locksutil/locks_test.go index 954a46349ea7ec..99166446377797 100644 --- a/sdk/helper/locksutil/locks_test.go +++ b/sdk/helper/locksutil/locks_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package locksutil import "testing" diff --git a/sdk/helper/logging/logging.go b/sdk/helper/logging/logging.go index 37dcefa4778369..25de5a7813168e 100644 --- a/sdk/helper/logging/logging.go +++ b/sdk/helper/logging/logging.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logging import ( diff --git a/sdk/helper/logging/logging_test.go b/sdk/helper/logging/logging_test.go index 16075524b0b8c2..91e204b097edc4 100644 --- a/sdk/helper/logging/logging_test.go +++ b/sdk/helper/logging/logging_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logging import ( diff --git a/sdk/helper/mlock/mlock.go b/sdk/helper/mlock/mlock.go index 5820d15af3c1d4..1bbf8a0bbbf83d 100644 --- a/sdk/helper/mlock/mlock.go +++ b/sdk/helper/mlock/mlock.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - // DEPRECATED: this has been moved to go-secure-stdlib and will be removed package mlock diff --git a/sdk/helper/ocsp/ocsp_test.go b/sdk/helper/ocsp/ocsp_test.go index 2f3f1976d2a807..892391d29bf4d1 100644 --- a/sdk/helper/ocsp/ocsp_test.go +++ b/sdk/helper/ocsp/ocsp_test.go @@ -8,6 +8,7 @@ import ( "crypto" "crypto/tls" "crypto/x509" + "encoding/pem" "errors" "fmt" "io" @@ -18,9 +19,16 @@ import ( "testing" "time" + "github.com/hashicorp/vault/api" + "github.com/hashicorp/vault/builtin/logical/pki" + vaulthttp "github.com/hashicorp/vault/http" + "github.com/hashicorp/vault/sdk/logical" + "github.com/hashicorp/vault/vault" + "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-retryablehttp" lru "github.com/hashicorp/golang-lru" + "github.com/stretchr/testify/require" "golang.org/x/crypto/ocsp" ) @@ -424,6 +432,165 @@ func TestCanEarlyExitForOCSP(t *testing.T) { } } +func TestWithVaultPKI(t *testing.T) { + t.Parallel() + + coreConfig := &vault.CoreConfig{ + LogicalBackends: map[string]logical.Factory{ + "pki": pki.Factory, + }, + } + cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ + HandlerFunc: vaulthttp.Handler, + }) + + cluster.Start() + defer cluster.Cleanup() + cores := cluster.Cores + vault.TestWaitActive(t, cores[0].Core) + client := cores[0].Client + + err := client.Sys().Mount("pki", &api.MountInput{ + Type: "pki", + Config: api.MountConfigInput{ + DefaultLeaseTTL: "16h", + MaxLeaseTTL: "32h", + }, + }) + require.NoError(t, err) + + resp, err := client.Logical().Write("pki/root/generate/internal", map[string]interface{}{ + "ttl": "40h", + "common_name": "Root R1", + "key_type": "ec", + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.NotEmpty(t, resp.Data["issuer_id"]) + rootIssuerId := resp.Data["issuer_id"].(string) + + // Set URLs pointing to the issuer. 
+ _, err = client.Logical().Write("pki/config/cluster", map[string]interface{}{ + "path": client.Address() + "/v1/pki", + "aia_path": client.Address() + "/v1/pki", + }) + require.NoError(t, err) + + _, err = client.Logical().Write("pki/config/urls", map[string]interface{}{ + "enable_templating": true, + "crl_distribution_points": "{{cluster_aia_path}}/issuer/{{issuer_id}}/crl/der", + "issuing_certificates": "{{cluster_aia_path}}/issuer/{{issuer_id}}/der", + "ocsp_servers": "{{cluster_aia_path}}/ocsp", + }) + require.NoError(t, err) + + // Build an intermediate CA + resp, err = client.Logical().Write("pki/intermediate/generate/internal", map[string]interface{}{ + "common_name": "Int X1", + "key_type": "ec", + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.NotEmpty(t, resp.Data["csr"]) + intermediateCSR := resp.Data["csr"].(string) + + resp, err = client.Logical().Write("pki/root/sign-intermediate", map[string]interface{}{ + "csr": intermediateCSR, + "ttl": "20h", + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.NotEmpty(t, resp.Data["certificate"]) + intermediateCert := resp.Data["certificate"] + + resp, err = client.Logical().Write("pki/intermediate/set-signed", map[string]interface{}{ + "certificate": intermediateCert, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.NotEmpty(t, resp.Data["imported_issuers"]) + rawImportedIssuers := resp.Data["imported_issuers"].([]interface{}) + require.Equal(t, len(rawImportedIssuers), 1) + importedIssuer := rawImportedIssuers[0].(string) + require.NotEmpty(t, importedIssuer) + + // Set intermediate as default. + _, err = client.Logical().Write("pki/config/issuers", map[string]interface{}{ + "default": importedIssuer, + }) + require.NoError(t, err) + + // Setup roles for root, intermediate. + _, err = client.Logical().Write("pki/roles/example-root", map[string]interface{}{ + "allowed_domains": "example.com", + "allow_subdomains": "true", + "max_ttl": "1h", + "key_type": "ec", + "issuer_ref": rootIssuerId, + }) + require.NoError(t, err) + + _, err = client.Logical().Write("pki/roles/example-int", map[string]interface{}{ + "allowed_domains": "example.com", + "allow_subdomains": "true", + "max_ttl": "1h", + "key_type": "ec", + }) + require.NoError(t, err) + + // Issue certs and validate them against OCSP. 
+ for _, path := range []string{"pki/issue/example-int", "pki/issue/example-root"} { + t.Logf("Validating against path: %v", path) + resp, err = client.Logical().Write(path, map[string]interface{}{ + "common_name": "test.example.com", + "ttl": "5m", + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.NotEmpty(t, resp.Data["certificate"]) + require.NotEmpty(t, resp.Data["issuing_ca"]) + require.NotEmpty(t, resp.Data["serial_number"]) + + certPEM := resp.Data["certificate"].(string) + certBlock, _ := pem.Decode([]byte(certPEM)) + require.NotNil(t, certBlock) + cert, err := x509.ParseCertificate(certBlock.Bytes) + require.NoError(t, err) + require.NotNil(t, cert) + + issuerPEM := resp.Data["issuing_ca"].(string) + issuerBlock, _ := pem.Decode([]byte(issuerPEM)) + require.NotNil(t, issuerBlock) + issuer, err := x509.ParseCertificate(issuerBlock.Bytes) + require.NoError(t, err) + require.NotNil(t, issuer) + + serialNumber := resp.Data["serial_number"].(string) + + conf := &VerifyConfig{ + OcspFailureMode: FailOpenFalse, + ExtraCas: []*x509.Certificate{cluster.CACert}, + } + ocspClient := New(testLogFactory, 10) + + err = ocspClient.VerifyLeafCertificate(context.Background(), cert, issuer, conf) + require.NoError(t, err) + + _, err = client.Logical().Write("pki/revoke", map[string]interface{}{ + "serial_number": serialNumber, + }) + require.NoError(t, err) + + err = ocspClient.VerifyLeafCertificate(context.Background(), cert, issuer, conf) + require.Error(t, err) + } +} + var testLogger = hclog.New(hclog.DefaultOptions) func testLogFactory() hclog.Logger { diff --git a/sdk/helper/parseutil/parseutil.go b/sdk/helper/parseutil/parseutil.go index 5bea8909de1420..eda539424f2d14 100644 --- a/sdk/helper/parseutil/parseutil.go +++ b/sdk/helper/parseutil/parseutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // DEPRECATED: this has been moved to go-secure-stdlib and will be removed package parseutil diff --git a/sdk/helper/password/password.go b/sdk/helper/password/password.go index 931a72cc8bc333..84e6b594d55d22 100644 --- a/sdk/helper/password/password.go +++ b/sdk/helper/password/password.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // DEPRECATED: this has been moved to go-secure-stdlib and will be removed package password diff --git a/sdk/helper/pathmanager/pathmanager.go b/sdk/helper/pathmanager/pathmanager.go index 0d2d60070f7890..e0e39445b2a562 100644 --- a/sdk/helper/pathmanager/pathmanager.go +++ b/sdk/helper/pathmanager/pathmanager.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pathmanager import ( diff --git a/sdk/helper/pathmanager/pathmanager_test.go b/sdk/helper/pathmanager/pathmanager_test.go index 515d830324f1e0..7d6207b625e601 100644 --- a/sdk/helper/pathmanager/pathmanager_test.go +++ b/sdk/helper/pathmanager/pathmanager_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pathmanager import ( diff --git a/sdk/helper/pluginutil/env.go b/sdk/helper/pluginutil/env.go index 1b45ef32dca225..df1fdbeede939d 100644 --- a/sdk/helper/pluginutil/env.go +++ b/sdk/helper/pluginutil/env.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
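The new TestWithVaultPKI exercises the package's VerifyLeafCertificate helper end-to-end against the PKI mount's /ocsp endpoint. As background, a stripped-down sketch of the raw OCSP exchange such a check ultimately performs, using golang.org/x/crypto/ocsp directly; responder discovery and error handling are simplified, and the helper name is illustrative.

package main

import (
	"bytes"
	"crypto/x509"
	"fmt"
	"io"
	"net/http"

	"golang.org/x/crypto/ocsp"
)

// checkOCSP asks the certificate's first OCSP responder whether cert
// (issued by issuer) has been revoked.
func checkOCSP(cert, issuer *x509.Certificate) (*ocsp.Response, error) {
	if len(cert.OCSPServer) == 0 {
		return nil, fmt.Errorf("certificate carries no OCSP server URL")
	}

	// Build a request keyed on the issuer name/key hashes and the serial number.
	reqBytes, err := ocsp.CreateRequest(cert, issuer, nil)
	if err != nil {
		return nil, err
	}

	// POST it to the responder (the PKI mount's /ocsp endpoint in the test).
	httpResp, err := http.Post(cert.OCSPServer[0], "application/ocsp-request", bytes.NewReader(reqBytes))
	if err != nil {
		return nil, err
	}
	defer httpResp.Body.Close()

	body, err := io.ReadAll(httpResp.Body)
	if err != nil {
		return nil, err
	}

	// Parse and verify the response against the issuer; the returned
	// Status is ocsp.Good, ocsp.Revoked, or ocsp.Unknown.
	return ocsp.ParseResponseForCert(body, cert, issuer)
}

func main() {
	// Usage sketch: after parsing the certificate and issuing_ca PEMs as in
	// the test above, call checkOCSP(cert, issuer) and inspect resp.Status.
	fmt.Println("see checkOCSP")
}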
-// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/env_test.go b/sdk/helper/pluginutil/env_test.go index 21f77faba6e63a..1d04b327524e58 100644 --- a/sdk/helper/pluginutil/env_test.go +++ b/sdk/helper/pluginutil/env_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/multiplexing.go b/sdk/helper/pluginutil/multiplexing.go index 8fc86a4c36c96e..41316ec49df284 100644 --- a/sdk/helper/pluginutil/multiplexing.go +++ b/sdk/helper/pluginutil/multiplexing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/multiplexing.pb.go b/sdk/helper/pluginutil/multiplexing.pb.go index d7663b90215f59..e585febc7914db 100644 --- a/sdk/helper/pluginutil/multiplexing.pb.go +++ b/sdk/helper/pluginutil/multiplexing.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/helper/pluginutil/multiplexing.proto b/sdk/helper/pluginutil/multiplexing.proto index c1a2ca0a4aa498..aa2438b070ffb4 100644 --- a/sdk/helper/pluginutil/multiplexing.proto +++ b/sdk/helper/pluginutil/multiplexing.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; package pluginutil.multiplexing; diff --git a/sdk/helper/pluginutil/multiplexing_test.go b/sdk/helper/pluginutil/multiplexing_test.go index 3f589ffa7cd9e7..125a4a120c624c 100644 --- a/sdk/helper/pluginutil/multiplexing_test.go +++ b/sdk/helper/pluginutil/multiplexing_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/run_config.go b/sdk/helper/pluginutil/run_config.go index be34fa9dc09b75..f344ca979931c9 100644 --- a/sdk/helper/pluginutil/run_config.go +++ b/sdk/helper/pluginutil/run_config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/run_config_test.go b/sdk/helper/pluginutil/run_config_test.go index e64057783ad0dc..b817ef9551becf 100644 --- a/sdk/helper/pluginutil/run_config_test.go +++ b/sdk/helper/pluginutil/run_config_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/runner.go b/sdk/helper/pluginutil/runner.go index 977f95d72208a1..886efe21f1a091 100644 --- a/sdk/helper/pluginutil/runner.go +++ b/sdk/helper/pluginutil/runner.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pluginutil/tls.go b/sdk/helper/pluginutil/tls.go index 21b35d910e798e..c5fff6d701ed76 100644 --- a/sdk/helper/pluginutil/tls.go +++ b/sdk/helper/pluginutil/tls.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pluginutil import ( diff --git a/sdk/helper/pointerutil/pointer.go b/sdk/helper/pointerutil/pointer.go index b4bfe114cfdf93..0f26e7dad660ab 100644 --- a/sdk/helper/pointerutil/pointer.go +++ b/sdk/helper/pointerutil/pointer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package pointerutil import ( diff --git a/sdk/helper/policyutil/policyutil.go b/sdk/helper/policyutil/policyutil.go index a5a8082e13c223..8e5541b1868fc3 100644 --- a/sdk/helper/policyutil/policyutil.go +++ b/sdk/helper/policyutil/policyutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package policyutil import ( diff --git a/sdk/helper/policyutil/policyutil_test.go b/sdk/helper/policyutil/policyutil_test.go index 2280ba93eed87d..4b26483f716afe 100644 --- a/sdk/helper/policyutil/policyutil_test.go +++ b/sdk/helper/policyutil/policyutil_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package policyutil import "testing" diff --git a/sdk/helper/roottoken/decode.go b/sdk/helper/roottoken/decode.go index 9939b67f72f44a..cc9300690a4aec 100644 --- a/sdk/helper/roottoken/decode.go +++ b/sdk/helper/roottoken/decode.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package roottoken import ( diff --git a/sdk/helper/roottoken/encode.go b/sdk/helper/roottoken/encode.go index dbbc90a2afa307..2537d93979064b 100644 --- a/sdk/helper/roottoken/encode.go +++ b/sdk/helper/roottoken/encode.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package roottoken import ( diff --git a/sdk/helper/roottoken/encode_test.go b/sdk/helper/roottoken/encode_test.go index 269bf65b047263..9df26928e29420 100644 --- a/sdk/helper/roottoken/encode_test.go +++ b/sdk/helper/roottoken/encode_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package roottoken import ( diff --git a/sdk/helper/roottoken/otp.go b/sdk/helper/roottoken/otp.go index 4445ec52dc6538..5a12c4f0ae869a 100644 --- a/sdk/helper/roottoken/otp.go +++ b/sdk/helper/roottoken/otp.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package roottoken import ( diff --git a/sdk/helper/roottoken/otp_test.go b/sdk/helper/roottoken/otp_test.go index 53776ec21c8b5b..437e8f3d0f22ec 100644 --- a/sdk/helper/roottoken/otp_test.go +++ b/sdk/helper/roottoken/otp_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package roottoken import ( diff --git a/sdk/helper/salt/salt.go b/sdk/helper/salt/salt.go index 84cbd03556c27b..50e0cad90a6073 100644 --- a/sdk/helper/salt/salt.go +++ b/sdk/helper/salt/salt.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package salt import ( diff --git a/sdk/helper/salt/salt_test.go b/sdk/helper/salt/salt_test.go index 3aec9a27b49901..99fcb06bd053b9 100644 --- a/sdk/helper/salt/salt_test.go +++ b/sdk/helper/salt/salt_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package salt import ( diff --git a/sdk/helper/strutil/strutil.go b/sdk/helper/strutil/strutil.go index a9e506942af59e..09cc9425cb1db6 100644 --- a/sdk/helper/strutil/strutil.go +++ b/sdk/helper/strutil/strutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // DEPRECATED: this has been moved to go-secure-stdlib and will be removed package strutil diff --git a/sdk/helper/template/funcs.go b/sdk/helper/template/funcs.go index 6d68cab3a7e441..ee9927fe15cbbb 100644 --- a/sdk/helper/template/funcs.go +++ b/sdk/helper/template/funcs.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package template import ( diff --git a/sdk/helper/template/funcs_test.go b/sdk/helper/template/funcs_test.go index 4965115960eeb5..f682a96753f2f9 100644 --- a/sdk/helper/template/funcs_test.go +++ b/sdk/helper/template/funcs_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package template import ( diff --git a/sdk/helper/template/template.go b/sdk/helper/template/template.go index dea65f3f5ed36d..2918825b978ec8 100644 --- a/sdk/helper/template/template.go +++ b/sdk/helper/template/template.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package template import ( diff --git a/sdk/helper/template/template_test.go b/sdk/helper/template/template_test.go index 2f66bf36fe035b..715dd52519e816 100644 --- a/sdk/helper/template/template_test.go +++ b/sdk/helper/template/template_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package template import ( diff --git a/sdk/helper/testcluster/consts.go b/sdk/helper/testcluster/consts.go deleted file mode 100644 index e85ef196a22152..00000000000000 --- a/sdk/helper/testcluster/consts.go +++ /dev/null @@ -1,12 +0,0 @@ -package testcluster - -const ( - // EnvVaultLicenseCI is the name of an environment variable that contains - // a signed license string used for Vault Enterprise binary-based tests. - // The binary will be run with the env var VAULT_LICENSE set to this value. - EnvVaultLicenseCI = "VAULT_LICENSE_CI" - - // DefaultCAFile is the path to the CA file. This is a docker-specific - // constant. TODO: needs to be moved to a more relevant place - DefaultCAFile = "/vault/config/ca.pem" -) diff --git a/sdk/helper/testcluster/docker/cert.go b/sdk/helper/testcluster/docker/cert.go deleted file mode 100644 index 4704030cb52f00..00000000000000 --- a/sdk/helper/testcluster/docker/cert.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package docker - -import ( - "crypto/tls" - "crypto/x509" - "encoding/pem" - "errors" - "fmt" - "io/ioutil" - "sync" - - "github.com/hashicorp/errwrap" -) - -// ReloadFunc are functions that are called when a reload is requested -type ReloadFunc func() error - -// CertificateGetter satisfies ReloadFunc and its GetCertificate method -// satisfies the tls.GetCertificate function signature. Currently it does not -// allow changing paths after the fact. 
-type CertificateGetter struct { - sync.RWMutex - - cert *tls.Certificate - - certFile string - keyFile string - passphrase string -} - -func NewCertificateGetter(certFile, keyFile, passphrase string) *CertificateGetter { - return &CertificateGetter{ - certFile: certFile, - keyFile: keyFile, - passphrase: passphrase, - } -} - -func (cg *CertificateGetter) Reload() error { - certPEMBlock, err := ioutil.ReadFile(cg.certFile) - if err != nil { - return err - } - keyPEMBlock, err := ioutil.ReadFile(cg.keyFile) - if err != nil { - return err - } - - // Check for encrypted pem block - keyBlock, _ := pem.Decode(keyPEMBlock) - if keyBlock == nil { - return errors.New("decoded PEM is blank") - } - - if x509.IsEncryptedPEMBlock(keyBlock) { - keyBlock.Bytes, err = x509.DecryptPEMBlock(keyBlock, []byte(cg.passphrase)) - if err != nil { - return errwrap.Wrapf("Decrypting PEM block failed {{err}}", err) - } - keyPEMBlock = pem.EncodeToMemory(keyBlock) - } - - cert, err := tls.X509KeyPair(certPEMBlock, keyPEMBlock) - if err != nil { - return err - } - - cg.Lock() - defer cg.Unlock() - - cg.cert = &cert - - return nil -} - -func (cg *CertificateGetter) GetCertificate(clientHello *tls.ClientHelloInfo) (*tls.Certificate, error) { - cg.RLock() - defer cg.RUnlock() - - if cg.cert == nil { - return nil, fmt.Errorf("nil certificate") - } - - return cg.cert, nil -} diff --git a/sdk/helper/testcluster/docker/environment.go b/sdk/helper/testcluster/docker/environment.go deleted file mode 100644 index 204bacbdbddce5..00000000000000 --- a/sdk/helper/testcluster/docker/environment.go +++ /dev/null @@ -1,1062 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package docker - -import ( - "bufio" - "bytes" - "context" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "crypto/tls" - "crypto/x509" - "crypto/x509/pkix" - "encoding/hex" - "encoding/json" - "encoding/pem" - "fmt" - "io" - "io/ioutil" - "math/big" - mathrand "math/rand" - "net" - "net/http" - "os" - "path/filepath" - "strings" - "sync" - "testing" - "time" - - "github.com/docker/docker/api/types" - "github.com/docker/docker/api/types/volume" - docker "github.com/docker/docker/client" - "github.com/hashicorp/go-cleanhttp" - log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/vault/api" - dockhelper "github.com/hashicorp/vault/sdk/helper/docker" - "github.com/hashicorp/vault/sdk/helper/logging" - "github.com/hashicorp/vault/sdk/helper/testcluster" - uberAtomic "go.uber.org/atomic" - "golang.org/x/net/http2" -) - -var ( - _ testcluster.VaultCluster = &DockerCluster{} - _ testcluster.VaultClusterNode = &DockerClusterNode{} -) - -const MaxClusterNameLength = 52 - -// DockerCluster is used to managing the lifecycle of the test Vault cluster -type DockerCluster struct { - ClusterName string - - RaftStorage bool - ClusterNodes []*DockerClusterNode - - // Certificate fields - *testcluster.CA - RootCAs *x509.CertPool - - barrierKeys [][]byte - recoveryKeys [][]byte - tmpDir string - - // rootToken is the initial root token created when the Vault cluster is - // created. 
- rootToken string - DockerAPI *docker.Client - ID string - Logger log.Logger - builtTags map[string]struct{} -} - -func (dc *DockerCluster) NamedLogger(s string) log.Logger { - return dc.Logger.Named(s) -} - -func (dc *DockerCluster) ClusterID() string { - return dc.ID -} - -func (dc *DockerCluster) Nodes() []testcluster.VaultClusterNode { - ret := make([]testcluster.VaultClusterNode, len(dc.ClusterNodes)) - for i := range dc.ClusterNodes { - ret[i] = dc.ClusterNodes[i] - } - return ret -} - -func (dc *DockerCluster) GetBarrierKeys() [][]byte { - return dc.barrierKeys -} - -func testKeyCopy(key []byte) []byte { - result := make([]byte, len(key)) - copy(result, key) - return result -} - -func (dc *DockerCluster) GetRecoveryKeys() [][]byte { - ret := make([][]byte, len(dc.recoveryKeys)) - for i, k := range dc.recoveryKeys { - ret[i] = testKeyCopy(k) - } - return ret -} - -func (dc *DockerCluster) GetBarrierOrRecoveryKeys() [][]byte { - return dc.GetBarrierKeys() -} - -func (dc *DockerCluster) SetBarrierKeys(keys [][]byte) { - dc.barrierKeys = make([][]byte, len(keys)) - for i, k := range keys { - dc.barrierKeys[i] = testKeyCopy(k) - } -} - -func (dc *DockerCluster) SetRecoveryKeys(keys [][]byte) { - dc.recoveryKeys = make([][]byte, len(keys)) - for i, k := range keys { - dc.recoveryKeys[i] = testKeyCopy(k) - } -} - -func (dc *DockerCluster) GetCACertPEMFile() string { - return dc.CACertPEMFile -} - -func (dc *DockerCluster) Cleanup() { - dc.cleanup() -} - -func (dc *DockerCluster) cleanup() error { - var result *multierror.Error - for _, node := range dc.ClusterNodes { - if err := node.cleanup(); err != nil { - result = multierror.Append(result, err) - } - } - - return result.ErrorOrNil() -} - -// GetRootToken returns the root token of the cluster, if set -func (dc *DockerCluster) GetRootToken() string { - return dc.rootToken -} - -func (dc *DockerCluster) SetRootToken(s string) { - dc.Logger.Trace("cluster root token changed", "helpful_env", fmt.Sprintf("VAULT_TOKEN=%s VAULT_CACERT=/vault/config/ca.pem", s)) - dc.rootToken = s -} - -func (n *DockerClusterNode) Name() string { - return n.Cluster.ClusterName + "-" + n.NodeID -} - -func (dc *DockerCluster) setupNode0(ctx context.Context) error { - client := dc.ClusterNodes[0].client - - var resp *api.InitResponse - var err error - for ctx.Err() == nil { - resp, err = client.Sys().Init(&api.InitRequest{ - SecretShares: 3, - SecretThreshold: 3, - }) - if err == nil && resp != nil { - break - } - time.Sleep(500 * time.Millisecond) - } - if err != nil { - return err - } - if resp == nil { - return fmt.Errorf("nil response to init request") - } - - for _, k := range resp.Keys { - raw, err := hex.DecodeString(k) - if err != nil { - return err - } - dc.barrierKeys = append(dc.barrierKeys, raw) - } - - for _, k := range resp.RecoveryKeys { - raw, err := hex.DecodeString(k) - if err != nil { - return err - } - dc.recoveryKeys = append(dc.recoveryKeys, raw) - } - - dc.rootToken = resp.RootToken - client.SetToken(dc.rootToken) - dc.ClusterNodes[0].client = client - - err = testcluster.UnsealNode(ctx, dc, 0) - if err != nil { - return err - } - - err = ensureLeaderMatches(ctx, client, func(leader *api.LeaderResponse) error { - if !leader.IsSelf { - return fmt.Errorf("node %d leader=%v, expected=%v", 0, leader.IsSelf, true) - } - - return nil - }) - - status, err := client.Sys().SealStatusWithContext(ctx) - if err != nil { - return err - } - dc.ID = status.ClusterID - return err -} - -func (dc *DockerCluster) clusterReady(ctx context.Context) error { - 
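setupNode0 above initializes and unseals node 0 through the public API client. A minimal standalone sketch of that flow against an arbitrary Vault address; the address, share count, and threshold below are illustrative.

package main

import (
	"fmt"

	"github.com/hashicorp/vault/api"
)

func initAndUnseal(addr string) (rootToken string, err error) {
	cfg := api.DefaultConfig()
	cfg.Address = addr // e.g. https://127.0.0.1:8200
	client, err := api.NewClient(cfg)
	if err != nil {
		return "", err
	}

	// Initialize with 3 key shares and a threshold of 3, as in setupNode0.
	initResp, err := client.Sys().Init(&api.InitRequest{
		SecretShares:    3,
		SecretThreshold: 3,
	})
	if err != nil {
		return "", err
	}

	// Feed the returned key shares back in until the node reports unsealed.
	for _, share := range initResp.Keys {
		status, err := client.Sys().Unseal(share)
		if err != nil {
			return "", err
		}
		if !status.Sealed {
			break
		}
	}

	return initResp.RootToken, nil
}

func main() {
	token, err := initAndUnseal("https://127.0.0.1:8200")
	if err != nil {
		panic(err)
	}
	fmt.Println("root token:", token)
}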
for i, node := range dc.ClusterNodes { - expectLeader := i == 0 - err := ensureLeaderMatches(ctx, node.client, func(leader *api.LeaderResponse) error { - if expectLeader != leader.IsSelf { - return fmt.Errorf("node %d leader=%v, expected=%v", i, leader.IsSelf, expectLeader) - } - - return nil - }) - if err != nil { - return err - } - } - - return nil -} - -func (dc *DockerCluster) setupCA(opts *DockerClusterOptions) error { - var err error - var ca testcluster.CA - - if opts != nil && opts.CAKey != nil { - ca.CAKey = opts.CAKey - } else { - ca.CAKey, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - return err - } - } - - var caBytes []byte - if opts != nil && len(opts.CACert) > 0 { - caBytes = opts.CACert - } else { - serialNumber := mathrand.New(mathrand.NewSource(time.Now().UnixNano())).Int63() - CACertTemplate := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: "localhost", - }, - KeyUsage: x509.KeyUsageCertSign | x509.KeyUsageCRLSign, - SerialNumber: big.NewInt(serialNumber), - NotBefore: time.Now().Add(-30 * time.Second), - NotAfter: time.Now().Add(262980 * time.Hour), - BasicConstraintsValid: true, - IsCA: true, - } - caBytes, err = x509.CreateCertificate(rand.Reader, CACertTemplate, CACertTemplate, ca.CAKey.Public(), ca.CAKey) - if err != nil { - return err - } - } - CACert, err := x509.ParseCertificate(caBytes) - if err != nil { - return err - } - ca.CACert = CACert - ca.CACertBytes = caBytes - - CACertPEMBlock := &pem.Block{ - Type: "CERTIFICATE", - Bytes: caBytes, - } - ca.CACertPEM = pem.EncodeToMemory(CACertPEMBlock) - - ca.CACertPEMFile = filepath.Join(dc.tmpDir, "ca", "ca.pem") - err = os.WriteFile(ca.CACertPEMFile, ca.CACertPEM, 0o755) - if err != nil { - return err - } - - marshaledCAKey, err := x509.MarshalECPrivateKey(ca.CAKey) - if err != nil { - return err - } - CAKeyPEMBlock := &pem.Block{ - Type: "EC PRIVATE KEY", - Bytes: marshaledCAKey, - } - ca.CAKeyPEM = pem.EncodeToMemory(CAKeyPEMBlock) - - dc.CA = &ca - - return nil -} - -func (n *DockerClusterNode) setupCert(ip string) error { - var err error - - n.ServerKey, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - return err - } - - serialNumber := mathrand.New(mathrand.NewSource(time.Now().UnixNano())).Int63() - certTemplate := &x509.Certificate{ - Subject: pkix.Name{ - CommonName: n.Name(), - }, - DNSNames: []string{"localhost", n.Name()}, - IPAddresses: []net.IP{net.IPv6loopback, net.ParseIP("127.0.0.1"), net.ParseIP(ip)}, - ExtKeyUsage: []x509.ExtKeyUsage{ - x509.ExtKeyUsageServerAuth, - x509.ExtKeyUsageClientAuth, - }, - KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment | x509.KeyUsageKeyAgreement, - SerialNumber: big.NewInt(serialNumber), - NotBefore: time.Now().Add(-30 * time.Second), - NotAfter: time.Now().Add(262980 * time.Hour), - } - n.ServerCertBytes, err = x509.CreateCertificate(rand.Reader, certTemplate, n.Cluster.CACert, n.ServerKey.Public(), n.Cluster.CAKey) - if err != nil { - return err - } - n.ServerCert, err = x509.ParseCertificate(n.ServerCertBytes) - if err != nil { - return err - } - n.ServerCertPEM = pem.EncodeToMemory(&pem.Block{ - Type: "CERTIFICATE", - Bytes: n.ServerCertBytes, - }) - - marshaledKey, err := x509.MarshalECPrivateKey(n.ServerKey) - if err != nil { - return err - } - n.ServerKeyPEM = pem.EncodeToMemory(&pem.Block{ - Type: "EC PRIVATE KEY", - Bytes: marshaledKey, - }) - - n.ServerCertPEMFile = filepath.Join(n.WorkDir, "cert.pem") - err = os.WriteFile(n.ServerCertPEMFile, n.ServerCertPEM, 0o755) - if err 
!= nil { - return err - } - - n.ServerKeyPEMFile = filepath.Join(n.WorkDir, "key.pem") - err = os.WriteFile(n.ServerKeyPEMFile, n.ServerKeyPEM, 0o755) - if err != nil { - return err - } - - tlsCert, err := tls.X509KeyPair(n.ServerCertPEM, n.ServerKeyPEM) - if err != nil { - return err - } - - certGetter := NewCertificateGetter(n.ServerCertPEMFile, n.ServerKeyPEMFile, "") - if err := certGetter.Reload(); err != nil { - return err - } - tlsConfig := &tls.Config{ - Certificates: []tls.Certificate{tlsCert}, - RootCAs: n.Cluster.RootCAs, - ClientCAs: n.Cluster.RootCAs, - ClientAuth: tls.RequestClientCert, - NextProtos: []string{"h2", "http/1.1"}, - GetCertificate: certGetter.GetCertificate, - } - - n.tlsConfig = tlsConfig - - err = os.WriteFile(filepath.Join(n.WorkDir, "ca.pem"), n.Cluster.CACertPEM, 0o755) - if err != nil { - return err - } - return nil -} - -func NewTestDockerCluster(t *testing.T, opts *DockerClusterOptions) *DockerCluster { - if opts == nil { - opts = &DockerClusterOptions{} - } - if opts.ClusterName == "" { - opts.ClusterName = strings.ReplaceAll(t.Name(), "/", "-") - } - if opts.Logger == nil { - opts.Logger = logging.NewVaultLogger(log.Trace).Named(t.Name()) - } - if opts.NetworkName == "" { - opts.NetworkName = os.Getenv("TEST_DOCKER_NETWORK_NAME") - } - if opts.VaultLicense == "" { - opts.VaultLicense = os.Getenv(testcluster.EnvVaultLicenseCI) - } - - ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second) - t.Cleanup(cancel) - - dc, err := NewDockerCluster(ctx, opts) - if err != nil { - t.Fatal(err) - } - dc.Logger.Trace("cluster started", "helpful_env", fmt.Sprintf("VAULT_TOKEN=%s VAULT_CACERT=/vault/config/ca.pem", dc.GetRootToken())) - return dc -} - -func NewDockerCluster(ctx context.Context, opts *DockerClusterOptions) (*DockerCluster, error) { - api, err := dockhelper.NewDockerAPI() - if err != nil { - return nil, err - } - - if opts == nil { - opts = &DockerClusterOptions{} - } - if opts.Logger == nil { - opts.Logger = log.NewNullLogger() - } - - dc := &DockerCluster{ - DockerAPI: api, - RaftStorage: true, - ClusterName: opts.ClusterName, - Logger: opts.Logger, - builtTags: map[string]struct{}{}, - CA: opts.CA, - } - - if err := dc.setupDockerCluster(ctx, opts); err != nil { - dc.Cleanup() - return nil, err - } - - return dc, nil -} - -// DockerClusterNode represents a single instance of Vault in a cluster -type DockerClusterNode struct { - NodeID string - HostPort string - client *api.Client - ServerCert *x509.Certificate - ServerCertBytes []byte - ServerCertPEM []byte - ServerCertPEMFile string - ServerKey *ecdsa.PrivateKey - ServerKeyPEM []byte - ServerKeyPEMFile string - tlsConfig *tls.Config - WorkDir string - Cluster *DockerCluster - Container *types.ContainerJSON - DockerAPI *docker.Client - runner *dockhelper.Runner - Logger log.Logger - cleanupContainer func() - RealAPIAddr string - ContainerNetworkName string - ContainerIPAddress string - ImageRepo string - ImageTag string - DataVolumeName string - cleanupVolume func() -} - -func (n *DockerClusterNode) TLSConfig() *tls.Config { - return n.tlsConfig.Clone() -} - -func (n *DockerClusterNode) APIClient() *api.Client { - // We clone to ensure that whenever this method is called, the caller gets - // back a pristine client, without e.g. any namespace or token changes that - // might pollute a shared client. 
We clone the config instead of the - // client because (1) Client.clone propagates the replicationStateStore and - // the httpClient pointers, (2) it doesn't copy the tlsConfig at all, and - // (3) if clone returns an error, it doesn't feel as appropriate to panic - // below. Who knows why clone might return an error? - cfg := n.client.CloneConfig() - client, err := api.NewClient(cfg) - if err != nil { - // It seems fine to panic here, since this should be the same input - // we provided to NewClient when we were setup, and we didn't panic then. - // Better not to completely ignore the error though, suppose there's a - // bug in CloneConfig? - panic(fmt.Sprintf("NewClient error on cloned config: %v", err)) - } - client.SetToken(n.Cluster.rootToken) - return client -} - -// NewAPIClient creates and configures a Vault API client to communicate with -// the running Vault Cluster for this DockerClusterNode -func (n *DockerClusterNode) apiConfig() (*api.Config, error) { - transport := cleanhttp.DefaultPooledTransport() - transport.TLSClientConfig = n.TLSConfig() - if err := http2.ConfigureTransport(transport); err != nil { - return nil, err - } - client := &http.Client{ - Transport: transport, - CheckRedirect: func(*http.Request, []*http.Request) error { - // This can of course be overridden per-test by using its own client - return fmt.Errorf("redirects not allowed in these tests") - }, - } - config := api.DefaultConfig() - if config.Error != nil { - return nil, config.Error - } - config.Address = fmt.Sprintf("https://%s", n.HostPort) - config.HttpClient = client - config.MaxRetries = 0 - return config, nil -} - -func (n *DockerClusterNode) newAPIClient() (*api.Client, error) { - config, err := n.apiConfig() - if err != nil { - return nil, err - } - client, err := api.NewClient(config) - if err != nil { - return nil, err - } - client.SetToken(n.Cluster.GetRootToken()) - return client, nil -} - -// Cleanup kills the container of the node and deletes its data volume -func (n *DockerClusterNode) Cleanup() { - n.cleanup() -} - -// Stop kills the container of the node -func (n *DockerClusterNode) Stop() { - n.cleanupContainer() -} - -func (n *DockerClusterNode) cleanup() error { - if n.Container == nil || n.Container.ID == "" { - return nil - } - n.cleanupContainer() - n.cleanupVolume() - return nil -} - -func (n *DockerClusterNode) Start(ctx context.Context, opts *DockerClusterOptions) error { - if n.DataVolumeName == "" { - vol, err := n.DockerAPI.VolumeCreate(ctx, volume.CreateOptions{}) - if err != nil { - return err - } - n.DataVolumeName = vol.Name - n.cleanupVolume = func() { - _ = n.DockerAPI.VolumeRemove(ctx, vol.Name, false) - } - } - vaultCfg := map[string]interface{}{} - vaultCfg["listener"] = map[string]interface{}{ - "tcp": map[string]interface{}{ - "address": fmt.Sprintf("%s:%d", "0.0.0.0", 8200), - "tls_cert_file": "/vault/config/cert.pem", - "tls_key_file": "/vault/config/key.pem", - "telemetry": map[string]interface{}{ - "unauthenticated_metrics_access": true, - }, - }, - } - vaultCfg["telemetry"] = map[string]interface{}{ - "disable_hostname": true, - } - raftOpts := map[string]interface{}{ - // TODO add options from vnc - "path": "/vault/file", - "node_id": n.NodeID, - } - vaultCfg["storage"] = map[string]interface{}{ - "raft": raftOpts, - } - if opts != nil && opts.VaultNodeConfig != nil && len(opts.VaultNodeConfig.StorageOptions) > 0 { - for k, v := range opts.VaultNodeConfig.StorageOptions { - if _, ok := raftOpts[k].(string); !ok { - raftOpts[k] = v - } - } - } - //// 
disable_mlock is required for working in the Docker environment with - //// custom plugins - vaultCfg["disable_mlock"] = true - vaultCfg["api_addr"] = `https://{{- GetAllInterfaces | exclude "flags" "loopback" | attr "address" -}}:8200` - vaultCfg["cluster_addr"] = `https://{{- GetAllInterfaces | exclude "flags" "loopback" | attr "address" -}}:8201` - - systemJSON, err := json.Marshal(vaultCfg) - if err != nil { - return err - } - err = os.WriteFile(filepath.Join(n.WorkDir, "system.json"), systemJSON, 0o644) - if err != nil { - return err - } - - if opts.VaultNodeConfig != nil { - localCfg := *opts.VaultNodeConfig - if opts.VaultNodeConfig.LicensePath != "" { - b, err := os.ReadFile(opts.VaultNodeConfig.LicensePath) - if err != nil || len(b) == 0 { - return fmt.Errorf("unable to read LicensePath at %q: %w", opts.VaultNodeConfig.LicensePath, err) - } - localCfg.LicensePath = "/vault/config/license" - dest := filepath.Join(n.WorkDir, "license") - err = os.WriteFile(dest, b, 0o644) - if err != nil { - return fmt.Errorf("error writing license to %q: %w", dest, err) - } - - } - userJSON, err := json.Marshal(localCfg) - if err != nil { - return err - } - err = os.WriteFile(filepath.Join(n.WorkDir, "user.json"), userJSON, 0o644) - if err != nil { - return err - } - } - - // Create a temporary cert so vault will start up - err = n.setupCert("127.0.0.1") - if err != nil { - return err - } - - caDir := filepath.Join(n.Cluster.tmpDir, "ca") - - // setup plugin bin copy if needed - copyFromTo := map[string]string{ - n.WorkDir: "/vault/config", - caDir: "/usr/local/share/ca-certificates/", - } - - var wg sync.WaitGroup - wg.Add(1) - var seenLogs uberAtomic.Bool - logConsumer := func(s string) { - if seenLogs.CAS(false, true) { - wg.Done() - } - n.Logger.Trace(s) - } - logStdout := &LogConsumerWriter{logConsumer} - logStderr := &LogConsumerWriter{func(s string) { - if seenLogs.CAS(false, true) { - wg.Done() - } - testcluster.JSONLogNoTimestamp(n.Logger, s) - }} - r, err := dockhelper.NewServiceRunner(dockhelper.RunOptions{ - ImageRepo: n.ImageRepo, - ImageTag: n.ImageTag, - // We don't need to run update-ca-certificates in the container, because - // we're providing the CA in the raft join call, and otherwise Vault - // servers don't talk to one another on the API port. - Cmd: append([]string{"server"}, opts.Args...), - Env: []string{ - // For now we're using disable_mlock, because this is for testing - // anyway, and because it prevents us using external plugins. - "SKIP_SETCAP=true", - "VAULT_LOG_FORMAT=json", - "VAULT_LICENSE=" + opts.VaultLicense, - }, - Ports: []string{"8200/tcp", "8201/tcp"}, - ContainerName: n.Name(), - NetworkName: opts.NetworkName, - CopyFromTo: copyFromTo, - LogConsumer: logConsumer, - LogStdout: logStdout, - LogStderr: logStderr, - PreDelete: true, - DoNotAutoRemove: true, - PostStart: func(containerID string, realIP string) error { - err := n.setupCert(realIP) - if err != nil { - return err - } - - // If we signal Vault before it installs its sighup handler, it'll die. 
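The api_addr and cluster_addr values written into system.json above are go-sockaddr templates rather than literal URLs; Vault expands them at startup inside the container so each node advertises its own non-loopback interface address. As a rough standalone illustration (not part of this patch; the program, its package main layout, and the go-sockaddr template import are assumptions), the same expression can be rendered like this:

package main

import (
	"fmt"
	"log"

	sockaddrtmpl "github.com/hashicorp/go-sockaddr/template"
)

func main() {
	// The same expression the helper embeds in api_addr/cluster_addr. Vault
	// evaluates it itself at startup; this program only shows what the
	// template resolves to on the local machine.
	addr, err := sockaddrtmpl.Parse(`{{ GetAllInterfaces | exclude "flags" "loopback" | attr "address" }}`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("https://%s:8200\n", addr)
}

Inside a Docker bridge network this typically resolves to the container's private address, which is why the helpers below can rely on nodes reaching one another directly on ports 8200 and 8201.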
- wg.Wait() - n.Logger.Trace("running poststart", "containerID", containerID, "IP", realIP) - return n.runner.RefreshFiles(ctx, containerID) - }, - Capabilities: []string{"NET_ADMIN"}, - OmitLogTimestamps: true, - VolumeNameToMountPoint: map[string]string{ - n.DataVolumeName: "/vault/file", - }, - }) - if err != nil { - return err - } - n.runner = r - - probe := opts.StartProbe - if probe == nil { - probe = func(c *api.Client) error { - _, err = c.Sys().SealStatus() - return err - } - } - svc, _, err := r.StartNewService(ctx, false, false, func(ctx context.Context, host string, port int) (dockhelper.ServiceConfig, error) { - config, err := n.apiConfig() - if err != nil { - return nil, err - } - config.Address = fmt.Sprintf("https://%s:%d", host, port) - client, err := api.NewClient(config) - if err != nil { - return nil, err - } - err = probe(client) - if err != nil { - return nil, err - } - - return dockhelper.NewServiceHostPort(host, port), nil - }) - if err != nil { - return err - } - - n.HostPort = svc.Config.Address() - n.Container = svc.Container - netName := opts.NetworkName - if netName == "" { - if len(svc.Container.NetworkSettings.Networks) > 1 { - return fmt.Errorf("Set d.RunOptions.NetworkName instead for container with multiple networks: %v", svc.Container.NetworkSettings.Networks) - } - for netName = range svc.Container.NetworkSettings.Networks { - // Networks above is a map; we just need to find the first and - // only key of this map (network name). The range handles this - // for us, but we need a loop construction in order to use range. - } - } - n.ContainerNetworkName = netName - n.ContainerIPAddress = svc.Container.NetworkSettings.Networks[netName].IPAddress - n.RealAPIAddr = "https://" + n.ContainerIPAddress + ":8200" - n.cleanupContainer = svc.Cleanup - - client, err := n.newAPIClient() - if err != nil { - return err - } - client.SetToken(n.Cluster.rootToken) - n.client = client - return nil -} - -func (n *DockerClusterNode) Pause(ctx context.Context) error { - return n.DockerAPI.ContainerPause(ctx, n.Container.ID) -} - -func (n *DockerClusterNode) AddNetworkDelay(ctx context.Context, delay time.Duration, targetIP string) error { - ip := net.ParseIP(targetIP) - if ip == nil { - return fmt.Errorf("targetIP %q is not an IP address", targetIP) - } - // Let's attempt to get a unique handle for the filter rule; we'll assume that - // every targetIP has a unique last octet, which is true currently for how - // we're doing docker networking. - lastOctet := ip.To4()[3] - - stdout, stderr, exitCode, err := n.runner.RunCmdWithOutput(ctx, n.Container.ID, []string{ - "/bin/sh", - "-xec", strings.Join([]string{ - fmt.Sprintf("echo isolating node %s", targetIP), - "apk add iproute2", - // If we're running this script a second time on the same node, - // the add dev will fail; since we only want to run the netem - // command once, we'll do so in the case where the add dev doesn't fail. - "tc qdisc add dev eth0 root handle 1: prio && " + - fmt.Sprintf("tc qdisc add dev eth0 parent 1:1 handle 2: netem delay %dms", delay/time.Millisecond), - // Here we create a u32 filter as per https://man7.org/linux/man-pages/man8/tc-u32.8.html - // Its parent is 1:0 (which I guess is the root?) 
- // Its handle must be unique, so we base it on targetIP - fmt.Sprintf("tc filter add dev eth0 parent 1:0 protocol ip pref 55 handle ::%x u32 match ip dst %s flowid 2:1", lastOctet, targetIP), - }, "; "), - }) - if err != nil { - return err - } - - n.Logger.Trace(string(stdout)) - n.Logger.Trace(string(stderr)) - if exitCode != 0 { - return fmt.Errorf("got nonzero exit code from iptables: %d", exitCode) - } - return nil -} - -type LogConsumerWriter struct { - consumer func(string) -} - -func (l LogConsumerWriter) Write(p []byte) (n int, err error) { - // TODO this assumes that we're never passed partial log lines, which - // seems a safe assumption for now based on how docker looks to implement - // logging, but might change in the future. - scanner := bufio.NewScanner(bytes.NewReader(p)) - scanner.Buffer(make([]byte, 64*1024), bufio.MaxScanTokenSize) - for scanner.Scan() { - l.consumer(scanner.Text()) - } - return len(p), nil -} - -// DockerClusterOptions has options for setting up the docker cluster -type DockerClusterOptions struct { - testcluster.ClusterOptions - CAKey *ecdsa.PrivateKey - NetworkName string - ImageRepo string - ImageTag string - CA *testcluster.CA - VaultBinary string - Args []string - StartProbe func(*api.Client) error -} - -func ensureLeaderMatches(ctx context.Context, client *api.Client, ready func(response *api.LeaderResponse) error) error { - var leader *api.LeaderResponse - var err error - for ctx.Err() == nil { - leader, err = client.Sys().Leader() - switch { - case err != nil: - case leader == nil: - err = fmt.Errorf("nil response to leader check") - default: - err = ready(leader) - if err == nil { - return nil - } - } - time.Sleep(500 * time.Millisecond) - } - return fmt.Errorf("error checking leader: %v", err) -} - -const DefaultNumCores = 3 - -// creates a managed docker container running Vault -func (dc *DockerCluster) setupDockerCluster(ctx context.Context, opts *DockerClusterOptions) error { - if opts.TmpDir != "" { - if _, err := os.Stat(opts.TmpDir); os.IsNotExist(err) { - if err := os.MkdirAll(opts.TmpDir, 0o700); err != nil { - return err - } - } - dc.tmpDir = opts.TmpDir - } else { - tempDir, err := ioutil.TempDir("", "vault-test-cluster-") - if err != nil { - return err - } - dc.tmpDir = tempDir - } - caDir := filepath.Join(dc.tmpDir, "ca") - if err := os.MkdirAll(caDir, 0o755); err != nil { - return err - } - - var numCores int - if opts.NumCores == 0 { - numCores = DefaultNumCores - } else { - numCores = opts.NumCores - } - - if dc.CA == nil { - if err := dc.setupCA(opts); err != nil { - return err - } - } - dc.RootCAs = x509.NewCertPool() - dc.RootCAs.AddCert(dc.CA.CACert) - - for i := 0; i < numCores; i++ { - if err := dc.addNode(ctx, opts); err != nil { - return err - } - if opts.SkipInit { - continue - } - if i == 0 { - if err := dc.setupNode0(ctx); err != nil { - return nil - } - } else { - if err := dc.joinNode(ctx, i, 0); err != nil { - return err - } - } - } - - return nil -} - -func (dc *DockerCluster) AddNode(ctx context.Context, opts *DockerClusterOptions) error { - leaderIdx, err := testcluster.LeaderNode(ctx, dc) - if err != nil { - return err - } - if err := dc.addNode(ctx, opts); err != nil { - return err - } - - return dc.joinNode(ctx, len(dc.ClusterNodes)-1, leaderIdx) -} - -func (dc *DockerCluster) addNode(ctx context.Context, opts *DockerClusterOptions) error { - tag, err := dc.setupImage(ctx, opts) - if err != nil { - return err - } - i := len(dc.ClusterNodes) - nodeID := fmt.Sprintf("core-%d", i) - node := &DockerClusterNode{ 
- DockerAPI: dc.DockerAPI, - NodeID: nodeID, - Cluster: dc, - WorkDir: filepath.Join(dc.tmpDir, nodeID), - Logger: dc.Logger.Named(nodeID), - ImageRepo: opts.ImageRepo, - ImageTag: tag, - } - dc.ClusterNodes = append(dc.ClusterNodes, node) - if err := os.MkdirAll(node.WorkDir, 0o755); err != nil { - return err - } - if err := node.Start(ctx, opts); err != nil { - return err - } - return nil -} - -func (dc *DockerCluster) joinNode(ctx context.Context, nodeIdx int, leaderIdx int) error { - leader := dc.ClusterNodes[leaderIdx] - - if nodeIdx >= len(dc.ClusterNodes) { - return fmt.Errorf("invalid node %d", nodeIdx) - } - node := dc.ClusterNodes[nodeIdx] - client := node.APIClient() - - var resp *api.RaftJoinResponse - resp, err := client.Sys().RaftJoinWithContext(ctx, &api.RaftJoinRequest{ - // When running locally on a bridge network, the containers must use their - // actual (private) IP to talk to one another. Our code must instead use - // the portmapped address since we're not on their network in that case. - LeaderAPIAddr: leader.RealAPIAddr, - LeaderCACert: string(dc.CACertPEM), - LeaderClientCert: string(node.ServerCertPEM), - LeaderClientKey: string(node.ServerKeyPEM), - }) - if resp == nil || !resp.Joined { - return fmt.Errorf("nil or negative response from raft join request: %v", resp) - } - if err != nil { - return fmt.Errorf("failed to join cluster: %w", err) - } - - return testcluster.UnsealNode(ctx, dc, nodeIdx) -} - -func (dc *DockerCluster) setupImage(ctx context.Context, opts *DockerClusterOptions) (string, error) { - if opts == nil { - opts = &DockerClusterOptions{} - } - sourceTag := opts.ImageTag - if sourceTag == "" { - sourceTag = "latest" - } - - if opts.VaultBinary == "" { - return sourceTag, nil - } - - suffix := "testing" - if sha := os.Getenv("COMMIT_SHA"); sha != "" { - suffix = sha - } - tag := sourceTag + "-" + suffix - if _, ok := dc.builtTags[tag]; ok { - return tag, nil - } - - f, err := os.Open(opts.VaultBinary) - if err != nil { - return "", err - } - data, err := io.ReadAll(f) - if err != nil { - return "", err - } - bCtx := dockhelper.NewBuildContext() - bCtx["vault"] = &dockhelper.FileContents{ - Data: data, - Mode: 0o755, - } - - containerFile := fmt.Sprintf(` -FROM %s:%s -COPY vault /bin/vault -`, opts.ImageRepo, sourceTag) - - _, err = dockhelper.BuildImage(ctx, dc.DockerAPI, containerFile, bCtx, - dockhelper.BuildRemove(true), dockhelper.BuildForceRemove(true), - dockhelper.BuildPullParent(true), - dockhelper.BuildTags([]string{opts.ImageRepo + ":" + tag})) - if err != nil { - return "", err - } - dc.builtTags[tag] = struct{}{} - return tag, nil -} - -/* Notes on testing the non-bridge network case: -- you need the test itself to be running in a container so that it can use - the network; create the network using - docker network create testvault -- this means that you need to mount the docker socket in that test container, - but on macos there's stuff that prevents that from working; to hack that, - on the host run - sudo ln -s "$HOME/Library/Containers/com.docker.docker/Data/docker.raw.sock" /var/run/docker.sock.raw -- run the test container like - docker run --rm -it --network testvault \ - -v /var/run/docker.sock.raw:/var/run/docker.sock \ - -v $(pwd):/home/circleci/go/src/github.com/hashicorp/vault/ \ - -w /home/circleci/go/src/github.com/hashicorp/vault/ \ - "docker.mirror.hashicorp.services/cimg/go:1.19.2" /bin/bash -- in the container you may need to chown/chmod /var/run/docker.sock; use `docker ps` - to test if it's working - -*/ diff --git 
a/sdk/helper/testcluster/docker/replication.go b/sdk/helper/testcluster/docker/replication.go deleted file mode 100644 index c313e7af4d8d20..00000000000000 --- a/sdk/helper/testcluster/docker/replication.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package docker - -import ( - "context" - "fmt" - "os" - "strings" - "testing" - - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/sdk/helper/logging" - "github.com/hashicorp/vault/sdk/helper/testcluster" -) - -func DefaultOptions(t *testing.T) *DockerClusterOptions { - return &DockerClusterOptions{ - ImageRepo: "hashicorp/vault", - ImageTag: "latest", - VaultBinary: os.Getenv("VAULT_BINARY"), - ClusterOptions: testcluster.ClusterOptions{ - NumCores: 3, - ClusterName: strings.ReplaceAll(t.Name(), "/", "-"), - VaultNodeConfig: &testcluster.VaultNodeConfig{ - LogLevel: "TRACE", - }, - }, - } -} - -func NewReplicationSetDocker(t *testing.T, opts *DockerClusterOptions) (*testcluster.ReplicationSet, error) { - binary := os.Getenv("VAULT_BINARY") - if binary == "" { - t.Skip("only running docker test when $VAULT_BINARY present") - } - - r := &testcluster.ReplicationSet{ - Clusters: map[string]testcluster.VaultCluster{}, - Logger: logging.NewVaultLogger(hclog.Trace).Named(t.Name()), - } - - // clusterName is used for container name as well. - // A container name should not exceed 64 chars. - // There are additional chars that are added to the name as well - // like "-A-core0". So, setting a max limit for a cluster name. - if len(opts.ClusterName) > MaxClusterNameLength { - return nil, fmt.Errorf("cluster name length exceeded the maximum allowed length of %v", MaxClusterNameLength) - } - - r.Builder = func(ctx context.Context, name string, baseLogger hclog.Logger) (testcluster.VaultCluster, error) { - myOpts := *opts - myOpts.Logger = baseLogger.Named(name) - if myOpts.ClusterName == "" { - myOpts.ClusterName = strings.ReplaceAll(t.Name(), "/", "-") - } - myOpts.ClusterName += "-" + strings.ReplaceAll(name, "/", "-") - myOpts.CA = r.CA - return NewTestDockerCluster(t, &myOpts), nil - } - - a, err := r.Builder(context.TODO(), "A", r.Logger) - if err != nil { - return nil, err - } - r.Clusters["A"] = a - r.CA = a.(*DockerCluster).CA - - return r, err -} diff --git a/sdk/helper/testcluster/exec.go b/sdk/helper/testcluster/exec.go deleted file mode 100644 index d91a3de034ac6f..00000000000000 --- a/sdk/helper/testcluster/exec.go +++ /dev/null @@ -1,324 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package testcluster - -import ( - "bufio" - "context" - "crypto/tls" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "testing" - "time" - - log "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/sdk/helper/jsonutil" - "github.com/hashicorp/vault/sdk/helper/logging" -) - -type ExecDevCluster struct { - ID string - ClusterName string - ClusterNodes []*execDevClusterNode - CACertPEMFile string - barrierKeys [][]byte - recoveryKeys [][]byte - tmpDir string - clientAuthRequired bool - rootToken string - stop func() - stopCh chan struct{} - Logger log.Logger -} - -func (dc *ExecDevCluster) SetRootToken(token string) { - dc.rootToken = token -} - -func (dc *ExecDevCluster) NamedLogger(s string) log.Logger { - return dc.Logger.Named(s) -} - -var _ VaultCluster = &ExecDevCluster{} - -type ExecDevClusterOptions struct { - ClusterOptions - BinaryPath string - // this is -dev-listen-address, defaults to "127.0.0.1:8200" - BaseListenAddress string -} - -func NewTestExecDevCluster(t *testing.T, opts *ExecDevClusterOptions) *ExecDevCluster { - if opts == nil { - opts = &ExecDevClusterOptions{} - } - if opts.ClusterName == "" { - opts.ClusterName = strings.ReplaceAll(t.Name(), "/", "-") - } - if opts.Logger == nil { - opts.Logger = logging.NewVaultLogger(log.Trace).Named(t.Name()) // .Named("container") - } - if opts.VaultLicense == "" { - opts.VaultLicense = os.Getenv(EnvVaultLicenseCI) - } - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - t.Cleanup(cancel) - - dc, err := NewExecDevCluster(ctx, opts) - if err != nil { - t.Fatal(err) - } - return dc -} - -func NewExecDevCluster(ctx context.Context, opts *ExecDevClusterOptions) (*ExecDevCluster, error) { - dc := &ExecDevCluster{ - ClusterName: opts.ClusterName, - stopCh: make(chan struct{}), - } - - if opts == nil { - opts = &ExecDevClusterOptions{} - } - if opts.NumCores == 0 { - opts.NumCores = 3 - } - if err := dc.setupExecDevCluster(ctx, opts); err != nil { - dc.Cleanup() - return nil, err - } - - return dc, nil -} - -func (dc *ExecDevCluster) setupExecDevCluster(ctx context.Context, opts *ExecDevClusterOptions) (retErr error) { - if opts == nil { - opts = &ExecDevClusterOptions{} - } - if opts.Logger == nil { - opts.Logger = log.NewNullLogger() - } - dc.Logger = opts.Logger - - if opts.TmpDir != "" { - if _, err := os.Stat(opts.TmpDir); os.IsNotExist(err) { - if err := os.MkdirAll(opts.TmpDir, 0o700); err != nil { - return err - } - } - dc.tmpDir = opts.TmpDir - } else { - tempDir, err := os.MkdirTemp("", "vault-test-cluster-") - if err != nil { - return err - } - dc.tmpDir = tempDir - } - - // This context is used to stop the subprocess - execCtx, cancel := context.WithCancel(context.Background()) - dc.stop = func() { - cancel() - close(dc.stopCh) - } - defer func() { - if retErr != nil { - cancel() - } - }() - - bin := opts.BinaryPath - if bin == "" { - bin = "vault" - } - - clusterJsonPath := filepath.Join(dc.tmpDir, "cluster.json") - args := []string{"server", "-dev", "-dev-cluster-json", clusterJsonPath} - switch { - case opts.NumCores == 3: - args = append(args, "-dev-three-node") - case opts.NumCores == 1: - args = append(args, "-dev-tls") - default: - return fmt.Errorf("NumCores=1 and NumCores=3 are the only supported options right now") - } - if opts.BaseListenAddress != "" { - args = append(args, "-dev-listen-address", opts.BaseListenAddress) - } - cmd := exec.CommandContext(execCtx, bin, args...) 
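For reference, a minimal test driving the exec-based cluster removed here could look like the sketch below. It is not part of the patch: the package and test names are illustrative, and it assumes a vault binary on PATH (the documented fallback when BinaryPath is empty).

package mytest

import (
	"testing"

	"github.com/hashicorp/vault/sdk/helper/testcluster"
)

func TestExecDevClusterSmoke(t *testing.T) {
	// NumCores must be 1 or 3, matching the -dev-tls and -dev-three-node
	// modes supported by the helper.
	cluster := testcluster.NewTestExecDevCluster(t, &testcluster.ExecDevClusterOptions{
		ClusterOptions: testcluster.ClusterOptions{
			NumCores: 3,
		},
	})
	defer cluster.Cleanup()

	// Each node exposes an API client already configured with the CA cert and
	// the dev root token discovered from the -dev-cluster-json file.
	client := cluster.Nodes()[0].APIClient()
	if _, err := client.Sys().Health(); err != nil {
		t.Fatal(err)
	}
}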
- cmd.Env = os.Environ() - cmd.Env = append(cmd.Env, "VAULT_LICENSE="+opts.VaultLicense) - cmd.Env = append(cmd.Env, "VAULT_LOG_FORMAT=json") - cmd.Env = append(cmd.Env, "VAULT_DEV_TEMP_DIR="+dc.tmpDir) - if opts.Logger != nil { - stdout, err := cmd.StdoutPipe() - if err != nil { - return err - } - go func() { - outlog := opts.Logger.Named("stdout") - scanner := bufio.NewScanner(stdout) - for scanner.Scan() { - outlog.Trace(scanner.Text()) - } - }() - stderr, err := cmd.StderrPipe() - if err != nil { - return err - } - go func() { - errlog := opts.Logger.Named("stderr") - scanner := bufio.NewScanner(stderr) - // The default buffer is 4k, and Vault can emit bigger log lines - scanner.Buffer(make([]byte, 64*1024), bufio.MaxScanTokenSize) - for scanner.Scan() { - JSONLogNoTimestamp(errlog, scanner.Text()) - } - }() - } - - if err := cmd.Start(); err != nil { - return err - } - - for ctx.Err() == nil { - if b, err := os.ReadFile(clusterJsonPath); err == nil && len(b) > 0 { - var clusterJson ClusterJson - if err := jsonutil.DecodeJSON(b, &clusterJson); err != nil { - continue - } - dc.CACertPEMFile = clusterJson.CACertPath - dc.rootToken = clusterJson.RootToken - for i, node := range clusterJson.Nodes { - config := api.DefaultConfig() - config.Address = node.APIAddress - err := config.ConfigureTLS(&api.TLSConfig{ - CACert: clusterJson.CACertPath, - }) - if err != nil { - return err - } - client, err := api.NewClient(config) - if err != nil { - return err - } - client.SetToken(dc.rootToken) - _, err = client.Sys().ListMounts() - if err != nil { - return err - } - - dc.ClusterNodes = append(dc.ClusterNodes, &execDevClusterNode{ - name: fmt.Sprintf("core-%d", i), - client: client, - }) - } - return nil - } - time.Sleep(500 * time.Millisecond) - } - return ctx.Err() -} - -type execDevClusterNode struct { - name string - client *api.Client -} - -var _ VaultClusterNode = &execDevClusterNode{} - -func (e *execDevClusterNode) Name() string { - return e.name -} - -func (e *execDevClusterNode) APIClient() *api.Client { - // We clone to ensure that whenever this method is called, the caller gets - // back a pristine client, without e.g. any namespace or token changes that - // might pollute a shared client. We clone the config instead of the - // client because (1) Client.clone propagates the replicationStateStore and - // the httpClient pointers, (2) it doesn't copy the tlsConfig at all, and - // (3) if clone returns an error, it doesn't feel as appropriate to panic - // below. Who knows why clone might return an error? - cfg := e.client.CloneConfig() - client, err := api.NewClient(cfg) - if err != nil { - // It seems fine to panic here, since this should be the same input - // we provided to NewClient when we were setup, and we didn't panic then. - // Better not to completely ignore the error though, suppose there's a - // bug in CloneConfig? 
- panic(fmt.Sprintf("NewClient error on cloned config: %v", err)) - } - client.SetToken(e.client.Token()) - return client -} - -func (e *execDevClusterNode) TLSConfig() *tls.Config { - return e.client.CloneConfig().TLSConfig() -} - -func (dc *ExecDevCluster) ClusterID() string { - return dc.ID -} - -func (dc *ExecDevCluster) Nodes() []VaultClusterNode { - ret := make([]VaultClusterNode, len(dc.ClusterNodes)) - for i := range dc.ClusterNodes { - ret[i] = dc.ClusterNodes[i] - } - return ret -} - -func (dc *ExecDevCluster) GetBarrierKeys() [][]byte { - return dc.barrierKeys -} - -func copyKey(key []byte) []byte { - result := make([]byte, len(key)) - copy(result, key) - return result -} - -func (dc *ExecDevCluster) GetRecoveryKeys() [][]byte { - ret := make([][]byte, len(dc.recoveryKeys)) - for i, k := range dc.recoveryKeys { - ret[i] = copyKey(k) - } - return ret -} - -func (dc *ExecDevCluster) GetBarrierOrRecoveryKeys() [][]byte { - return dc.GetBarrierKeys() -} - -func (dc *ExecDevCluster) SetBarrierKeys(keys [][]byte) { - dc.barrierKeys = make([][]byte, len(keys)) - for i, k := range keys { - dc.barrierKeys[i] = copyKey(k) - } -} - -func (dc *ExecDevCluster) SetRecoveryKeys(keys [][]byte) { - dc.recoveryKeys = make([][]byte, len(keys)) - for i, k := range keys { - dc.recoveryKeys[i] = copyKey(k) - } -} - -func (dc *ExecDevCluster) GetCACertPEMFile() string { - return dc.CACertPEMFile -} - -func (dc *ExecDevCluster) Cleanup() { - dc.stop() -} - -// GetRootToken returns the root token of the cluster, if set -func (dc *ExecDevCluster) GetRootToken() string { - return dc.rootToken -} diff --git a/sdk/helper/testcluster/logging.go b/sdk/helper/testcluster/logging.go deleted file mode 100644 index dda759c7f84f10..00000000000000 --- a/sdk/helper/testcluster/logging.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package testcluster - -import ( - "encoding/json" - "strings" - - "github.com/hashicorp/go-hclog" -) - -func JSONLogNoTimestamp(outlog hclog.Logger, text string) { - d := json.NewDecoder(strings.NewReader(text)) - m := map[string]interface{}{} - if err := d.Decode(&m); err != nil { - outlog.Error("failed to decode json output from dev vault", "error", err, "input", text) - return - } - - delete(m, "@timestamp") - message := m["@message"].(string) - delete(m, "@message") - level := m["@level"].(string) - delete(m, "@level") - if module, ok := m["@module"]; ok { - delete(m, "@module") - outlog = outlog.Named(module.(string)) - } - - var pairs []interface{} - for k, v := range m { - pairs = append(pairs, k, v) - } - - outlog.Log(hclog.LevelFromString(level), message, pairs...) 
-} diff --git a/sdk/helper/testcluster/replication.go b/sdk/helper/testcluster/replication.go deleted file mode 100644 index 46356deff14012..00000000000000 --- a/sdk/helper/testcluster/replication.go +++ /dev/null @@ -1,905 +0,0 @@ -package testcluster - -import ( - "context" - "encoding/json" - "fmt" - "reflect" - "strings" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/go-secure-stdlib/strutil" - "github.com/hashicorp/go-uuid" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/sdk/helper/consts" - "github.com/mitchellh/mapstructure" -) - -func GetPerformanceToken(pri VaultCluster, id, secondaryPublicKey string) (string, error) { - client := pri.Nodes()[0].APIClient() - req := map[string]interface{}{ - "id": id, - } - if secondaryPublicKey != "" { - req["secondary_public_key"] = secondaryPublicKey - } - secret, err := client.Logical().Write("sys/replication/performance/primary/secondary-token", req) - if err != nil { - return "", err - } - - if secondaryPublicKey != "" { - return secret.Data["token"].(string), nil - } - return secret.WrapInfo.Token, nil -} - -func EnablePerfPrimary(ctx context.Context, pri VaultCluster) error { - client := pri.Nodes()[0].APIClient() - _, err := client.Logical().WriteWithContext(ctx, "sys/replication/performance/primary/enable", nil) - if err != nil { - return err - } - - err = WaitForPerfReplicationState(ctx, pri, consts.ReplicationPerformancePrimary) - if err != nil { - return err - } - return WaitForActiveNodeAndPerfStandbys(ctx, pri) -} - -func WaitForPerfReplicationState(ctx context.Context, cluster VaultCluster, state consts.ReplicationState) error { - client := cluster.Nodes()[0].APIClient() - var health *api.HealthResponse - var err error - for ctx.Err() == nil { - health, err = client.Sys().HealthWithContext(ctx) - if err == nil && health.ReplicationPerformanceMode == state.GetPerformanceString() { - return nil - } - time.Sleep(500 * time.Millisecond) - } - if err == nil { - err = ctx.Err() - } - return err -} - -func EnablePerformanceSecondaryNoWait(ctx context.Context, perfToken string, pri, sec VaultCluster, updatePrimary bool) error { - postData := map[string]interface{}{ - "token": perfToken, - "ca_file": DefaultCAFile, - } - path := "sys/replication/performance/secondary/enable" - if updatePrimary { - path = "sys/replication/performance/secondary/update-primary" - } - err := WaitForActiveNodeAndPerfStandbys(ctx, sec) - if err != nil { - return err - } - _, err = sec.Nodes()[0].APIClient().Logical().Write(path, postData) - if err != nil { - return err - } - - return WaitForPerfReplicationState(ctx, sec, consts.ReplicationPerformanceSecondary) -} - -func EnablePerformanceSecondary(ctx context.Context, perfToken string, pri, sec VaultCluster, updatePrimary, skipPoisonPill bool) (string, error) { - if err := EnablePerformanceSecondaryNoWait(ctx, perfToken, pri, sec, updatePrimary); err != nil { - return "", err - } - if err := WaitForMatchingMerkleRoots(ctx, "sys/replication/performance/", pri, sec); err != nil { - return "", err - } - root, err := WaitForPerformanceSecondary(ctx, pri, sec, skipPoisonPill) - if err != nil { - return "", err - } - if err := WaitForPerfReplicationWorking(ctx, pri, sec); err != nil { - return "", err - } - return root, nil -} - -func WaitForMatchingMerkleRoots(ctx context.Context, endpoint string, pri, sec VaultCluster) error { - getRoot := func(mode string, cli *api.Client) (string, error) { - status, err := cli.Logical().Read(endpoint + "status") - if err != nil { - return "", 
err - } - if status == nil || status.Data == nil || status.Data["mode"] == nil { - return "", fmt.Errorf("got nil secret or data") - } - if status.Data["mode"].(string) != mode { - return "", fmt.Errorf("expected mode=%s, got %s", mode, status.Data["mode"].(string)) - } - return status.Data["merkle_root"].(string), nil - } - - secClient := sec.Nodes()[0].APIClient() - priClient := pri.Nodes()[0].APIClient() - for i := 0; i < 30; i++ { - secRoot, err := getRoot("secondary", secClient) - if err != nil { - return err - } - priRoot, err := getRoot("primary", priClient) - if err != nil { - return err - } - - if reflect.DeepEqual(priRoot, secRoot) { - return nil - } - time.Sleep(time.Second) - } - - return fmt.Errorf("roots did not become equal") -} - -func WaitForPerformanceWAL(ctx context.Context, pri, sec VaultCluster) error { - endpoint := "sys/replication/performance/" - if err := WaitForMatchingMerkleRoots(ctx, endpoint, pri, sec); err != nil { - return nil - } - getWAL := func(mode, walKey string, cli *api.Client) (int64, error) { - status, err := cli.Logical().Read(endpoint + "status") - if err != nil { - return 0, err - } - if status == nil || status.Data == nil || status.Data["mode"] == nil { - return 0, fmt.Errorf("got nil secret or data") - } - if status.Data["mode"].(string) != mode { - return 0, fmt.Errorf("expected mode=%s, got %s", mode, status.Data["mode"].(string)) - } - return status.Data[walKey].(json.Number).Int64() - } - - secClient := sec.Nodes()[0].APIClient() - priClient := pri.Nodes()[0].APIClient() - for ctx.Err() == nil { - secLastRemoteWAL, err := getWAL("secondary", "last_remote_wal", secClient) - if err != nil { - return err - } - priLastPerfWAL, err := getWAL("primary", "last_performance_wal", priClient) - if err != nil { - return err - } - - if secLastRemoteWAL >= priLastPerfWAL { - return nil - } - time.Sleep(time.Second) - } - - return fmt.Errorf("performance WALs on the secondary did not catch up with the primary, context err: %w", ctx.Err()) -} - -func WaitForPerformanceSecondary(ctx context.Context, pri, sec VaultCluster, skipPoisonPill bool) (string, error) { - if len(pri.GetRecoveryKeys()) > 0 { - sec.SetBarrierKeys(pri.GetRecoveryKeys()) - sec.SetRecoveryKeys(pri.GetRecoveryKeys()) - } else { - sec.SetBarrierKeys(pri.GetBarrierKeys()) - sec.SetRecoveryKeys(pri.GetBarrierKeys()) - } - - if len(sec.Nodes()) > 1 { - if skipPoisonPill { - // As part of prepareSecondary on the active node the keyring is - // deleted from storage. Its absence can cause standbys to seal - // themselves. But it's not reliable, so we'll seal them - // ourselves to force the issue. 
- for i := range sec.Nodes()[1:] { - if err := SealNode(ctx, sec, i+1); err != nil { - return "", err - } - } - } else { - // We want to make sure we unseal all the nodes so we first need to wait - // until two of the nodes seal due to the poison pill being written - if err := WaitForNCoresSealed(ctx, sec, len(sec.Nodes())-1); err != nil { - return "", err - } - } - } - if _, err := WaitForActiveNode(ctx, sec); err != nil { - return "", err - } - if err := UnsealAllNodes(ctx, sec); err != nil { - return "", err - } - - perfSecondaryRootToken, err := GenerateRoot(sec, GenerateRootRegular) - if err != nil { - return "", err - } - sec.SetRootToken(perfSecondaryRootToken) - if err := WaitForActiveNodeAndPerfStandbys(ctx, sec); err != nil { - return "", err - } - - return perfSecondaryRootToken, nil -} - -func WaitForPerfReplicationWorking(ctx context.Context, pri, sec VaultCluster) error { - priActiveIdx, err := WaitForActiveNode(ctx, pri) - if err != nil { - return err - } - secActiveIdx, err := WaitForActiveNode(ctx, sec) - if err != nil { - return err - } - - priClient, secClient := pri.Nodes()[priActiveIdx].APIClient(), sec.Nodes()[secActiveIdx].APIClient() - mountPoint, err := uuid.GenerateUUID() - if err != nil { - return err - } - err = priClient.Sys().Mount(mountPoint, &api.MountInput{ - Type: "kv", - Local: false, - }) - if err != nil { - return fmt.Errorf("unable to mount KV engine on primary") - } - - path := mountPoint + "/foo" - _, err = priClient.Logical().Write(path, map[string]interface{}{ - "bar": 1, - }) - if err != nil { - return fmt.Errorf("unable to write KV on primary", "path", path) - } - - for ctx.Err() == nil { - var secret *api.Secret - secret, err = secClient.Logical().Read(path) - if err == nil && secret != nil { - err = priClient.Sys().Unmount(mountPoint) - if err != nil { - return fmt.Errorf("unable to unmount KV engine on primary") - } - return nil - } - time.Sleep(100 * time.Millisecond) - } - if err == nil { - err = ctx.Err() - } - return fmt.Errorf("unable to read replicated KV on secondary, path=%s, err=%v", path, err) -} - -func SetupTwoClusterPerfReplication(ctx context.Context, pri, sec VaultCluster) error { - if err := EnablePerfPrimary(ctx, pri); err != nil { - return err - } - perfToken, err := GetPerformanceToken(pri, sec.ClusterID(), "") - if err != nil { - return err - } - - _, err = EnablePerformanceSecondary(ctx, perfToken, pri, sec, false, false) - return err -} - -// PassiveWaitForActiveNodeAndPerfStandbys should be used instead of -// WaitForActiveNodeAndPerfStandbys when you don't want to do any writes -// as a side-effect. This returns perfStandby nodes in the cluster and -// an error. 
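SetupTwoClusterPerfReplication above is typically driven through the docker-backed ReplicationSet builder that this patch also removes (sdk/helper/testcluster/docker/replication.go). A rough usage sketch, not itself part of the patch (the test name and timeout are assumptions; a local Docker daemon and $VAULT_BINARY are required, and NewReplicationSetDocker skips the test when the latter is absent):

package mytest

import (
	"context"
	"testing"
	"time"

	"github.com/hashicorp/vault/sdk/helper/testcluster/docker"
)

func TestTwoClusterPerfReplication(t *testing.T) {
	r, err := docker.NewReplicationSetDocker(t, docker.DefaultOptions(t))
	if err != nil {
		t.Fatal(err)
	}
	defer r.Cleanup()

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	// StandardPerfReplication builds any missing clusters ("A" and "C") via
	// the docker builder and then runs SetupTwoClusterPerfReplication(A, C)
	// as defined above.
	if err := r.StandardPerfReplication(ctx); err != nil {
		t.Fatal(err)
	}
}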
-func PassiveWaitForActiveNodeAndPerfStandbys(ctx context.Context, pri VaultCluster) (VaultClusterNode, []VaultClusterNode, error) { - leaderNode, standbys, err := GetActiveAndStandbys(ctx, pri) - if err != nil { - return nil, nil, fmt.Errorf("failed to derive standby nodes, %w", err) - } - - for i, node := range standbys { - client := node.APIClient() - // Make sure we get perf standby nodes - if err = EnsureCoreIsPerfStandby(ctx, client); err != nil { - return nil, nil, fmt.Errorf("standby node %d is not a perfStandby, %w", i, err) - } - } - - return leaderNode, standbys, nil -} - -func GetActiveAndStandbys(ctx context.Context, cluster VaultCluster) (VaultClusterNode, []VaultClusterNode, error) { - var leaderIndex int - var err error - if leaderIndex, err = WaitForActiveNode(ctx, cluster); err != nil { - return nil, nil, err - } - - var leaderNode VaultClusterNode - var nodes []VaultClusterNode - for i, node := range cluster.Nodes() { - if i == leaderIndex { - leaderNode = node - continue - } - nodes = append(nodes, node) - } - - return leaderNode, nodes, nil -} - -func EnsureCoreIsPerfStandby(ctx context.Context, client *api.Client) error { - var err error - var health *api.HealthResponse - for ctx.Err() == nil { - health, err = client.Sys().HealthWithContext(ctx) - if err == nil && health.PerformanceStandby { - return nil - } - time.Sleep(time.Millisecond * 500) - } - if err == nil { - err = ctx.Err() - } - return err -} - -func WaitForDRReplicationState(ctx context.Context, cluster VaultCluster, state consts.ReplicationState) error { - client := cluster.Nodes()[0].APIClient() - var health *api.HealthResponse - var err error - for ctx.Err() == nil { - health, err = client.Sys().HealthWithContext(ctx) - if err == nil && health.ReplicationDRMode == state.GetDRString() { - return nil - } - time.Sleep(500 * time.Millisecond) - } - if err == nil { - err = ctx.Err() - } - return err -} - -func EnableDrPrimary(ctx context.Context, pri VaultCluster) error { - client := pri.Nodes()[0].APIClient() - _, err := client.Logical().Write("sys/replication/dr/primary/enable", nil) - if err != nil { - return err - } - - err = WaitForDRReplicationState(ctx, pri, consts.ReplicationDRPrimary) - if err != nil { - return err - } - return WaitForActiveNodeAndPerfStandbys(ctx, pri) -} - -func GenerateDRActivationToken(pri VaultCluster, id, secondaryPublicKey string) (string, error) { - client := pri.Nodes()[0].APIClient() - req := map[string]interface{}{ - "id": id, - } - if secondaryPublicKey != "" { - req["secondary_public_key"] = secondaryPublicKey - } - secret, err := client.Logical().Write("sys/replication/dr/primary/secondary-token", req) - if err != nil { - return "", err - } - - if secondaryPublicKey != "" { - return secret.Data["token"].(string), nil - } - return secret.WrapInfo.Token, nil -} - -func WaitForDRSecondary(ctx context.Context, pri, sec VaultCluster, skipPoisonPill bool) error { - if len(pri.GetRecoveryKeys()) > 0 { - sec.SetBarrierKeys(pri.GetRecoveryKeys()) - sec.SetRecoveryKeys(pri.GetRecoveryKeys()) - } else { - sec.SetBarrierKeys(pri.GetBarrierKeys()) - sec.SetRecoveryKeys(pri.GetBarrierKeys()) - } - - if len(sec.Nodes()) > 1 { - if skipPoisonPill { - // As part of prepareSecondary on the active node the keyring is - // deleted from storage. Its absence can cause standbys to seal - // themselves. But it's not reliable, so we'll seal them - // ourselves to force the issue. 
- for i := range sec.Nodes()[1:] { - if err := SealNode(ctx, sec, i+1); err != nil { - return err - } - } - } else { - // We want to make sure we unseal all the nodes so we first need to wait - // until two of the nodes seal due to the poison pill being written - if err := WaitForNCoresSealed(ctx, sec, len(sec.Nodes())-1); err != nil { - return err - } - } - } - if _, err := WaitForActiveNode(ctx, sec); err != nil { - return err - } - - // unseal nodes - for i := range sec.Nodes() { - if err := UnsealNode(ctx, sec, i); err != nil { - // Sometimes when we get here it's already unsealed on its own - // and then this fails for DR secondaries so check again - // The error is "path disabled in replication DR secondary mode". - if healthErr := NodeHealthy(ctx, sec, i); healthErr != nil { - // return the original error - return err - } - } - } - - sec.SetRootToken(pri.GetRootToken()) - - if _, err := WaitForActiveNode(ctx, sec); err != nil { - return err - } - - return nil -} - -func EnableDRSecondaryNoWait(ctx context.Context, sec VaultCluster, drToken string) error { - postData := map[string]interface{}{ - "token": drToken, - "ca_file": DefaultCAFile, - } - - _, err := sec.Nodes()[0].APIClient().Logical().Write("sys/replication/dr/secondary/enable", postData) - if err != nil { - return err - } - - return WaitForDRReplicationState(ctx, sec, consts.ReplicationDRSecondary) -} - -func WaitForReplicationStatus(ctx context.Context, client *api.Client, dr bool, accept func(map[string]interface{}) error) error { - url := "sys/replication/performance/status" - if dr { - url = "sys/replication/dr/status" - } - - var err error - var secret *api.Secret - for ctx.Err() == nil { - secret, err = client.Logical().Read(url) - if err == nil && secret != nil && secret.Data != nil { - if err = accept(secret.Data); err == nil { - return nil - } - } - time.Sleep(500 * time.Millisecond) - } - if err == nil { - err = ctx.Err() - } - - return fmt.Errorf("unable to get acceptable replication status: error=%v secret=%#v", err, secret) -} - -func WaitForDRReplicationWorking(ctx context.Context, pri, sec VaultCluster) error { - priClient := pri.Nodes()[0].APIClient() - secClient := sec.Nodes()[0].APIClient() - - // Make sure we've entered stream-wals mode - err := WaitForReplicationStatus(ctx, secClient, true, func(secret map[string]interface{}) error { - state := secret["state"] - if state == string("stream-wals") { - return nil - } - return fmt.Errorf("expected stream-wals replication state, got %v", state) - }) - if err != nil { - return err - } - - // Now write some data and make sure that we see last_remote_wal nonzero, i.e. - // at least one WAL has been streamed. - secret, err := priClient.Auth().Token().Create(&api.TokenCreateRequest{}) - if err != nil { - return err - } - - // Revoke the token since some tests won't be happy to see it. 
- err = priClient.Auth().Token().RevokeTree(secret.Auth.ClientToken) - if err != nil { - return err - } - - err = WaitForReplicationStatus(ctx, secClient, true, func(secret map[string]interface{}) error { - state := secret["state"] - if state != string("stream-wals") { - return fmt.Errorf("expected stream-wals replication state, got %v", state) - } - - if secret["last_remote_wal"] != nil { - lastRemoteWal, _ := secret["last_remote_wal"].(json.Number).Int64() - if lastRemoteWal <= 0 { - return fmt.Errorf("expected last_remote_wal to be greater than zero") - } - return nil - } - - return fmt.Errorf("replication seems to be still catching up, maybe need to wait more") - }) - if err != nil { - return err - } - return nil -} - -func EnableDrSecondary(ctx context.Context, pri, sec VaultCluster, drToken string) error { - err := EnableDRSecondaryNoWait(ctx, sec, drToken) - if err != nil { - return err - } - - if err = WaitForMatchingMerkleRoots(ctx, "sys/replication/dr/", pri, sec); err != nil { - return err - } - - err = WaitForDRSecondary(ctx, pri, sec, false) - if err != nil { - return err - } - - if err = WaitForDRReplicationWorking(ctx, pri, sec); err != nil { - return err - } - return nil -} - -func SetupTwoClusterDRReplication(ctx context.Context, pri, sec VaultCluster) error { - if err := EnableDrPrimary(ctx, pri); err != nil { - return err - } - - drToken, err := GenerateDRActivationToken(pri, sec.ClusterID(), "") - if err != nil { - return err - } - err = EnableDrSecondary(ctx, pri, sec, drToken) - if err != nil { - return err - } - return nil -} - -func DemoteDRPrimary(client *api.Client) error { - _, err := client.Logical().Write("sys/replication/dr/primary/demote", map[string]interface{}{}) - return err -} - -func createBatchToken(client *api.Client, path string) (string, error) { - // TODO: should these be more random in case more than one batch token needs to be created? - suffix := strings.Replace(path, "/", "", -1) - policyName := "path-batch-policy-" + suffix - roleName := "path-batch-role-" + suffix - - rules := fmt.Sprintf(`path "%s" { capabilities = [ "read", "update" ] }`, path) - - // create policy - _, err := client.Logical().Write("sys/policy/"+policyName, map[string]interface{}{ - "policy": rules, - }) - if err != nil { - return "", err - } - - // create a role - _, err = client.Logical().Write("auth/token/roles/"+roleName, map[string]interface{}{ - "allowed_policies": policyName, - "orphan": true, - "renewable": false, - "token_type": "batch", - }) - if err != nil { - return "", err - } - - // create batch token - secret, err := client.Logical().Write("auth/token/create/"+roleName, nil) - if err != nil { - return "", err - } - - return secret.Auth.ClientToken, nil -} - -// PromoteDRSecondaryWithBatchToken creates a batch token for DR promotion -// before promotion, it demotes the primary cluster. The primary cluster needs -// to be functional for the generation of the batch token -func PromoteDRSecondaryWithBatchToken(ctx context.Context, pri, sec VaultCluster) error { - client := pri.Nodes()[0].APIClient() - drToken, err := createBatchToken(client, "sys/replication/dr/secondary/promote") - if err != nil { - return err - } - - err = DemoteDRPrimary(client) - if err != nil { - return err - } - - return promoteDRSecondaryInternal(ctx, sec, drToken) -} - -// PromoteDRSecondary generates a DR operation token on the secondary using -// unseal/recovery keys. Therefore, the primary cluster could potentially -// be out of service. 
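Taken together, the DR helpers above support a full failover exercise. The following is a hedged composition sketch, not part of the patch: the function name and package are illustrative, and it assumes the two clusters already exist (for example from NewTestDockerCluster).

package mytest

import (
	"context"

	"github.com/hashicorp/vault/sdk/helper/testcluster"
)

// exerciseDRFailover is an illustrative composition of the DR helpers above.
func exerciseDRFailover(ctx context.Context, pri, sec testcluster.VaultCluster) error {
	// Enable DR replication: pri becomes the DR primary, sec its secondary,
	// and the helper waits until WAL streaming is demonstrably working.
	if err := testcluster.SetupTwoClusterDRReplication(ctx, pri, sec); err != nil {
		return err
	}

	// Fail over: mint a batch promotion token on the still-healthy primary,
	// demote it, then promote the secondary. The helper waits for the
	// promoted cluster to report the DR primary state before returning.
	return testcluster.PromoteDRSecondaryWithBatchToken(ctx, pri, sec)
}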
-func PromoteDRSecondary(ctx context.Context, sec VaultCluster) error { - // generate DR operation token to do update primary on vC to point to - // the new perfSec primary vD - drToken, err := GenerateRoot(sec, GenerateRootDR) - if err != nil { - return err - } - return promoteDRSecondaryInternal(ctx, sec, drToken) -} - -func promoteDRSecondaryInternal(ctx context.Context, sec VaultCluster, drToken string) error { - secClient := sec.Nodes()[0].APIClient() - - // Allow retries of 503s, e.g.: replication is still catching up, - // try again later or provide the "force" argument - oldMaxRetries := secClient.MaxRetries() - secClient.SetMaxRetries(10) - defer secClient.SetMaxRetries(oldMaxRetries) - resp, err := secClient.Logical().Write("sys/replication/dr/secondary/promote", map[string]interface{}{ - "dr_operation_token": drToken, - }) - if err != nil { - return err - } - if resp == nil { - return fmt.Errorf("nil status response during DR promotion") - } - - if _, err := WaitForActiveNode(ctx, sec); err != nil { - return err - } - - return WaitForDRReplicationState(ctx, sec, consts.ReplicationDRPrimary) -} - -func checkClusterAddr(ctx context.Context, pri, sec VaultCluster) error { - priClient := pri.Nodes()[0].APIClient() - priLeader, err := priClient.Sys().LeaderWithContext(ctx) - if err != nil { - return err - } - secClient := sec.Nodes()[0].APIClient() - endpoint := "sys/replication/dr/" - status, err := secClient.Logical().Read(endpoint + "status") - if err != nil { - return err - } - if status == nil || status.Data == nil { - return fmt.Errorf("got nil secret or data") - } - - var priAddrs []string - err = mapstructure.Decode(status.Data["known_primary_cluster_addrs"], &priAddrs) - if err != nil { - return err - } - if !strutil.StrListContains(priAddrs, priLeader.LeaderClusterAddress) { - return fmt.Errorf("failed to fine the expected primary cluster address %v in known_primary_cluster_addrs", priLeader.LeaderClusterAddress) - } - - return nil -} - -func UpdatePrimary(ctx context.Context, pri, sec VaultCluster) error { - // generate DR operation token to do update primary on vC to point to - // the new perfSec primary vD - rootToken, err := GenerateRoot(sec, GenerateRootDR) - if err != nil { - return err - } - - // secondary activation token - drToken, err := GenerateDRActivationToken(pri, sec.ClusterID(), "") - if err != nil { - return err - } - - // update-primary on vC (new perfSec Dr secondary) to point to - // the new perfSec Dr primary - secClient := sec.Nodes()[0].APIClient() - resp, err := secClient.Logical().Write("sys/replication/dr/secondary/update-primary", map[string]interface{}{ - "dr_operation_token": rootToken, - "token": drToken, - "ca_file": DefaultCAFile, - }) - if err != nil { - return err - } - if resp == nil { - return fmt.Errorf("nil status response during update primary") - } - - if _, err = WaitForActiveNode(ctx, sec); err != nil { - return err - } - - if err = WaitForDRReplicationState(ctx, sec, consts.ReplicationDRSecondary); err != nil { - return err - } - - if err = checkClusterAddr(ctx, pri, sec); err != nil { - return err - } - - return nil -} - -func SetupFourClusterReplication(ctx context.Context, pri, sec, pridr, secdr VaultCluster) error { - err := SetupTwoClusterPerfReplication(ctx, pri, sec) - if err != nil { - return err - } - err = SetupTwoClusterDRReplication(ctx, pri, pridr) - if err != nil { - return err - } - err = SetupTwoClusterDRReplication(ctx, sec, secdr) - if err != nil { - return err - } - return nil -} - -type ReplicationSet struct { - 
// By convention, we recommend the following naming scheme for - // clusters in this map: - // A: perf primary - // B: primary's DR - // C: first perf secondary of A - // D: C's DR - // E: second perf secondary of A - // F: E's DR - // ... etc. - // - // We use generic names rather than role-specific names because - // that's less confusing when promotions take place that result in role - // changes. In other words, if D gets promoted to replace C as a perf - // secondary, and C gets demoted and updated to become D's DR secondary, - // they should maintain their initial names of D and C throughout. - Clusters map[string]VaultCluster - Builder ClusterBuilder - Logger hclog.Logger - CA *CA -} - -type ClusterBuilder func(ctx context.Context, name string, logger hclog.Logger) (VaultCluster, error) - -func NewReplicationSet(b ClusterBuilder) (*ReplicationSet, error) { - return &ReplicationSet{ - Clusters: map[string]VaultCluster{}, - Builder: b, - Logger: hclog.NewNullLogger(), - }, nil -} - -func (r *ReplicationSet) StandardPerfReplication(ctx context.Context) error { - for _, name := range []string{"A", "C"} { - if _, ok := r.Clusters[name]; !ok { - cluster, err := r.Builder(ctx, name, r.Logger) - if err != nil { - return err - } - r.Clusters[name] = cluster - } - } - - ctx, cancel := context.WithTimeout(context.Background(), time.Minute) - defer cancel() - err := SetupTwoClusterPerfReplication(ctx, r.Clusters["A"], r.Clusters["C"]) - if err != nil { - return err - } - - return nil -} - -func (r *ReplicationSet) StandardDRReplication(ctx context.Context) error { - for _, name := range []string{"A", "B"} { - if _, ok := r.Clusters[name]; !ok { - cluster, err := r.Builder(ctx, name, r.Logger) - if err != nil { - return err - } - r.Clusters[name] = cluster - } - } - - ctx, cancel := context.WithTimeout(context.Background(), time.Minute) - defer cancel() - err := SetupTwoClusterDRReplication(ctx, r.Clusters["A"], r.Clusters["B"]) - if err != nil { - return err - } - - return nil -} - -func (r *ReplicationSet) GetFourReplicationCluster(ctx context.Context) error { - for _, name := range []string{"A", "B", "C", "D"} { - if _, ok := r.Clusters[name]; !ok { - cluster, err := r.Builder(ctx, name, r.Logger) - if err != nil { - return err - } - r.Clusters[name] = cluster - } - } - - ctx, cancel := context.WithTimeout(context.Background(), time.Minute) - defer cancel() - err := SetupFourClusterReplication(ctx, r.Clusters["A"], r.Clusters["C"], r.Clusters["B"], r.Clusters["D"]) - if err != nil { - return err - } - return nil -} - -func (r *ReplicationSet) Cleanup() { - for _, cluster := range r.Clusters { - cluster.Cleanup() - } -} - -func WaitForPerfReplicationConnectionStatus(ctx context.Context, client *api.Client) error { - type Primary struct { - APIAddress string `mapstructure:"api_address"` - ConnectionStatus string `mapstructure:"connection_status"` - ClusterAddress string `mapstructure:"cluster_address"` - LastHeartbeat string `mapstructure:"last_heartbeat"` - } - type Status struct { - Primaries []Primary `mapstructure:"primaries"` - } - return WaitForPerfReplicationStatus(ctx, client, func(m map[string]interface{}) error { - var status Status - err := mapstructure.Decode(m, &status) - if err != nil { - return err - } - if len(status.Primaries) == 0 { - return fmt.Errorf("primaries is zero") - } - for _, v := range status.Primaries { - if v.ConnectionStatus == "connected" { - return nil - } - } - return fmt.Errorf("no primaries connected") - }) -} - -func WaitForPerfReplicationStatus(ctx 
context.Context, client *api.Client, accept func(map[string]interface{}) error) error { - var err error - var secret *api.Secret - for ctx.Err() == nil { - secret, err = client.Logical().Read("sys/replication/performance/status") - if err == nil && secret != nil && secret.Data != nil { - if err = accept(secret.Data); err == nil { - return nil - } - } - time.Sleep(500 * time.Millisecond) - } - return fmt.Errorf("unable to get acceptable replication status within allotted time: error=%v secret=%#v", err, secret) -} diff --git a/sdk/helper/testcluster/types.go b/sdk/helper/testcluster/types.go deleted file mode 100644 index 084413521ce883..00000000000000 --- a/sdk/helper/testcluster/types.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package testcluster - -import ( - "crypto/ecdsa" - "crypto/tls" - "crypto/x509" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/hashicorp/vault/api" -) - -type VaultClusterNode interface { - APIClient() *api.Client - TLSConfig() *tls.Config -} - -type VaultCluster interface { - Nodes() []VaultClusterNode - GetBarrierKeys() [][]byte - GetRecoveryKeys() [][]byte - GetBarrierOrRecoveryKeys() [][]byte - SetBarrierKeys([][]byte) - SetRecoveryKeys([][]byte) - GetCACertPEMFile() string - Cleanup() - ClusterID() string - NamedLogger(string) hclog.Logger - SetRootToken(token string) - GetRootToken() string -} - -type VaultNodeConfig struct { - // Not configurable because cluster creator wants to control these: - // PluginDirectory string `hcl:"plugin_directory"` - // APIAddr string `hcl:"api_addr"` - // ClusterAddr string `hcl:"cluster_addr"` - // Storage *Storage `hcl:"-"` - // HAStorage *Storage `hcl:"-"` - // DisableMlock bool `hcl:"disable_mlock"` - // ClusterName string `hcl:"cluster_name"` - - // Not configurable yet: - // Listeners []*Listener `hcl:"-"` - // Seals []*KMS `hcl:"-"` - // Entropy *Entropy `hcl:"-"` - // Telemetry *Telemetry `hcl:"telemetry"` - // HCPLinkConf *HCPLinkConfig `hcl:"cloud"` - // PidFile string `hcl:"pid_file"` - // ServiceRegistrationType string - // ServiceRegistrationOptions map[string]string - - StorageOptions map[string]string - - DefaultMaxRequestDuration time.Duration `json:"default_max_request_duration"` - LogFormat string `json:"log_format"` - LogLevel string `json:"log_level"` - CacheSize int `json:"cache_size"` - DisableCache bool `json:"disable_cache"` - DisablePrintableCheck bool `json:"disable_printable_check"` - EnableUI bool `json:"ui"` - MaxLeaseTTL time.Duration `json:"max_lease_ttl"` - DefaultLeaseTTL time.Duration `json:"default_lease_ttl"` - ClusterCipherSuites string `json:"cluster_cipher_suites"` - PluginFileUid int `json:"plugin_file_uid"` - PluginFilePermissions int `json:"plugin_file_permissions"` - EnableRawEndpoint bool `json:"raw_storage_endpoint"` - DisableClustering bool `json:"disable_clustering"` - DisablePerformanceStandby bool `json:"disable_performance_standby"` - DisableSealWrap bool `json:"disable_sealwrap"` - DisableIndexing bool `json:"disable_indexing"` - DisableSentinelTrace bool `json:"disable_sentinel"` - EnableResponseHeaderHostname bool `json:"enable_response_header_hostname"` - LogRequestsLevel string `json:"log_requests_level"` - EnableResponseHeaderRaftNodeID bool `json:"enable_response_header_raft_node_id"` - LicensePath string `json:"license_path"` -} - -type ClusterNode struct { - APIAddress string `json:"api_address"` -} - -type ClusterJson struct { - Nodes []ClusterNode `json:"nodes"` - CACertPath string 
`json:"ca_cert_path"` - RootToken string `json:"root_token"` -} - -type ClusterOptions struct { - ClusterName string - KeepStandbysSealed bool - SkipInit bool - CACert []byte - NumCores int - TmpDir string - Logger hclog.Logger - VaultNodeConfig *VaultNodeConfig - VaultLicense string -} - -type CA struct { - CACert *x509.Certificate - CACertBytes []byte - CACertPEM []byte - CACertPEMFile string - CAKey *ecdsa.PrivateKey - CAKeyPEM []byte -} diff --git a/sdk/helper/testcluster/util.go b/sdk/helper/testcluster/util.go deleted file mode 100644 index 4ecf5f53389988..00000000000000 --- a/sdk/helper/testcluster/util.go +++ /dev/null @@ -1,356 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package testcluster - -import ( - "context" - "encoding/base64" - "encoding/hex" - "fmt" - "sync/atomic" - "time" - - "github.com/hashicorp/go-multierror" - "github.com/hashicorp/go-uuid" - "github.com/hashicorp/vault/api" - "github.com/hashicorp/vault/sdk/helper/xor" -) - -// Note that OSS standbys will not accept seal requests. And ent perf standbys -// may fail it as well if they haven't yet been able to get "elected" as perf standbys. -func SealNode(ctx context.Context, cluster VaultCluster, nodeIdx int) error { - if nodeIdx >= len(cluster.Nodes()) { - return fmt.Errorf("invalid nodeIdx %d for cluster", nodeIdx) - } - node := cluster.Nodes()[nodeIdx] - client := node.APIClient() - - err := client.Sys().SealWithContext(ctx) - if err != nil { - return err - } - - return NodeSealed(ctx, cluster, nodeIdx) -} - -func SealAllNodes(ctx context.Context, cluster VaultCluster) error { - for i := range cluster.Nodes() { - if err := SealNode(ctx, cluster, i); err != nil { - return err - } - } - return nil -} - -func UnsealNode(ctx context.Context, cluster VaultCluster, nodeIdx int) error { - if nodeIdx >= len(cluster.Nodes()) { - return fmt.Errorf("invalid nodeIdx %d for cluster", nodeIdx) - } - node := cluster.Nodes()[nodeIdx] - client := node.APIClient() - - for _, key := range cluster.GetBarrierOrRecoveryKeys() { - _, err := client.Sys().UnsealWithContext(ctx, hex.EncodeToString(key)) - if err != nil { - return err - } - } - - return NodeHealthy(ctx, cluster, nodeIdx) -} - -func UnsealAllNodes(ctx context.Context, cluster VaultCluster) error { - for i := range cluster.Nodes() { - if err := UnsealNode(ctx, cluster, i); err != nil { - return err - } - } - return nil -} - -func NodeSealed(ctx context.Context, cluster VaultCluster, nodeIdx int) error { - if nodeIdx >= len(cluster.Nodes()) { - return fmt.Errorf("invalid nodeIdx %d for cluster", nodeIdx) - } - node := cluster.Nodes()[nodeIdx] - client := node.APIClient() - - var health *api.HealthResponse - var err error - for ctx.Err() == nil { - health, err = client.Sys().HealthWithContext(ctx) - switch { - case err != nil: - case !health.Sealed: - err = fmt.Errorf("unsealed: %#v", health) - default: - return nil - } - time.Sleep(500 * time.Millisecond) - } - return fmt.Errorf("node %d is not sealed: %v", nodeIdx, err) -} - -func WaitForNCoresSealed(ctx context.Context, cluster VaultCluster, n int) error { - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - errs := make(chan error) - for i := range cluster.Nodes() { - go func(i int) { - var err error - for ctx.Err() == nil { - err = NodeSealed(ctx, cluster, i) - if err == nil { - errs <- nil - return - } - time.Sleep(100 * time.Millisecond) - } - if err == nil { - err = ctx.Err() - } - errs <- err - }(i) - } - - var merr *multierror.Error - var sealed int - for range 
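The seal helpers removed above are designed as a round trip: SealNode waits for the node to report sealed, and UnsealNode feeds the key shares back in and waits for health. A minimal sketch, assuming a cluster value satisfying the VaultCluster interface from the deleted types.go; the wrapper name is hypothetical.

package testcluster_example

import (
	"context"
	"time"

	"github.com/hashicorp/vault/sdk/helper/testcluster"
)

// sealUnsealRoundTrip seals node 0, confirms it reports sealed, then unseals
// it with the barrier/recovery keys and waits until the health check passes.
func sealUnsealRoundTrip(cluster testcluster.VaultCluster) error {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	// SealNode already waits (via NodeSealed) for the node to report sealed.
	if err := testcluster.SealNode(ctx, cluster, 0); err != nil {
		return err
	}

	// UnsealNode submits every barrier-or-recovery key share and then waits
	// (via NodeHealthy) for an unsealed, healthy response.
	return testcluster.UnsealNode(ctx, cluster, 0)
}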
cluster.Nodes() { - err := <-errs - if err != nil { - merr = multierror.Append(merr, err) - } else { - sealed++ - if sealed == n { - return nil - } - } - } - - return fmt.Errorf("%d cores were not sealed, errs: %v", n, merr.ErrorOrNil()) -} - -func NodeHealthy(ctx context.Context, cluster VaultCluster, nodeIdx int) error { - if nodeIdx >= len(cluster.Nodes()) { - return fmt.Errorf("invalid nodeIdx %d for cluster", nodeIdx) - } - node := cluster.Nodes()[nodeIdx] - client := node.APIClient() - - var health *api.HealthResponse - var err error - for ctx.Err() == nil { - health, err = client.Sys().HealthWithContext(ctx) - switch { - case err != nil: - case health == nil: - err = fmt.Errorf("nil response to health check") - case health.Sealed: - err = fmt.Errorf("sealed: %#v", health) - default: - return nil - } - time.Sleep(500 * time.Millisecond) - } - return fmt.Errorf("node %d is unhealthy: %v", nodeIdx, err) -} - -func LeaderNode(ctx context.Context, cluster VaultCluster) (int, error) { - for i, node := range cluster.Nodes() { - client := node.APIClient() - ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond) - resp, err := client.Sys().LeaderWithContext(ctx) - cancel() - if err != nil || resp == nil || !resp.IsSelf { - continue - } - return i, nil - } - return -1, fmt.Errorf("no leader found") -} - -func WaitForActiveNode(ctx context.Context, cluster VaultCluster) (int, error) { - for ctx.Err() == nil { - if idx, _ := LeaderNode(ctx, cluster); idx != -1 { - return idx, nil - } - time.Sleep(500 * time.Millisecond) - } - return -1, ctx.Err() -} - -func WaitForActiveNodeAndPerfStandbys(ctx context.Context, cluster VaultCluster) error { - logger := cluster.NamedLogger("WaitForActiveNodeAndPerfStandbys") - // This WaitForActiveNode was added because after a Raft cluster is sealed - // and then unsealed, when it comes up it may have a different leader than - // Core0, making this helper fail. - // A sleep before calling WaitForActiveNodeAndPerfStandbys seems to sort - // things out, but so apparently does this. We should be able to eliminate - // this call to WaitForActiveNode by reworking the logic in this method. 
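LeaderNode probes each node with a short per-node timeout and WaitForActiveNode loops until one of them claims leadership. A short sketch of the typical sequence after sealing and unsealing a cluster; the wrapper name and the two-minute deadline are choices of this example, not of the removed code.

package testcluster_example

import (
	"context"
	"fmt"
	"time"

	"github.com/hashicorp/vault/sdk/helper/testcluster"
)

// waitForLeader unseals every node and then blocks until one of them answers
// the leader API with is_self=true, returning that node's index.
func waitForLeader(cluster testcluster.VaultCluster) (int, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	if err := testcluster.UnsealAllNodes(ctx, cluster); err != nil {
		return -1, err
	}

	idx, err := testcluster.WaitForActiveNode(ctx, cluster)
	if err != nil {
		return -1, fmt.Errorf("no active node within deadline: %w", err)
	}
	return idx, nil
}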
- if _, err := WaitForActiveNode(ctx, cluster); err != nil { - return err - } - - if len(cluster.Nodes()) == 1 { - return nil - } - - expectedStandbys := len(cluster.Nodes()) - 1 - - mountPoint, err := uuid.GenerateUUID() - if err != nil { - return err - } - leaderClient := cluster.Nodes()[0].APIClient() - - for ctx.Err() == nil { - err = leaderClient.Sys().MountWithContext(ctx, mountPoint, &api.MountInput{ - Type: "kv", - Local: true, - }) - if err == nil { - break - } - time.Sleep(1 * time.Second) - } - if err != nil { - return fmt.Errorf("unable to mount KV engine: %v", err) - } - path := mountPoint + "/waitforactivenodeandperfstandbys" - var standbys, actives int64 - errchan := make(chan error, len(cluster.Nodes())) - for i := range cluster.Nodes() { - go func(coreNo int) { - node := cluster.Nodes()[coreNo] - client := node.APIClient() - val := 1 - var err error - defer func() { - errchan <- err - }() - - var lastWAL uint64 - for ctx.Err() == nil { - _, err = leaderClient.Logical().WriteWithContext(ctx, path, map[string]interface{}{ - "bar": val, - }) - val++ - time.Sleep(250 * time.Millisecond) - if err != nil { - continue - } - var leader *api.LeaderResponse - leader, err = client.Sys().LeaderWithContext(ctx) - if err != nil { - continue - } - switch { - case leader.IsSelf: - logger.Trace("waiting for core", "core", coreNo, "isLeader", true) - atomic.AddInt64(&actives, 1) - return - case leader.PerfStandby && leader.PerfStandbyLastRemoteWAL > 0: - switch { - case lastWAL == 0: - lastWAL = leader.PerfStandbyLastRemoteWAL - logger.Trace("waiting for core", "core", coreNo, "lastRemoteWAL", leader.PerfStandbyLastRemoteWAL, "lastWAL", lastWAL) - case lastWAL < leader.PerfStandbyLastRemoteWAL: - logger.Trace("waiting for core", "core", coreNo, "lastRemoteWAL", leader.PerfStandbyLastRemoteWAL, "lastWAL", lastWAL) - atomic.AddInt64(&standbys, 1) - return - } - } - } - }(i) - } - - errs := make([]error, 0, len(cluster.Nodes())) - for range cluster.Nodes() { - errs = append(errs, <-errchan) - } - if actives != 1 || int(standbys) != expectedStandbys { - return fmt.Errorf("expected 1 active core and %d standbys, got %d active and %d standbys, errs: %v", - expectedStandbys, actives, standbys, errs) - } - - for ctx.Err() == nil { - err = leaderClient.Sys().UnmountWithContext(ctx, mountPoint) - if err == nil { - break - } - time.Sleep(time.Second) - } - if err != nil { - return fmt.Errorf("unable to unmount KV engine on primary") - } - return nil -} - -type GenerateRootKind int - -const ( - GenerateRootRegular GenerateRootKind = iota - GenerateRootDR - GenerateRecovery -) - -func GenerateRoot(cluster VaultCluster, kind GenerateRootKind) (string, error) { - // If recovery keys supported, use those to perform root token generation instead - keys := cluster.GetBarrierOrRecoveryKeys() - - client := cluster.Nodes()[0].APIClient() - - var err error - var status *api.GenerateRootStatusResponse - switch kind { - case GenerateRootRegular: - status, err = client.Sys().GenerateRootInit("", "") - case GenerateRootDR: - status, err = client.Sys().GenerateDROperationTokenInit("", "") - case GenerateRecovery: - status, err = client.Sys().GenerateRecoveryOperationTokenInit("", "") - } - if err != nil { - return "", err - } - - if status.Required > len(keys) { - return "", fmt.Errorf("need more keys than have, need %d have %d", status.Required, len(keys)) - } - - otp := status.OTP - - for i, key := range keys { - if i >= status.Required { - break - } - - strKey := base64.StdEncoding.EncodeToString(key) - switch 
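A caller-side sketch of the helper whose body appears above: it confirms exactly one active node and that every other node is a perf standby that has replicated a fresh write, using a throwaway local KV mount. The wrapper name and timeout are this example's assumptions.

package testcluster_example

import (
	"context"
	"time"

	"github.com/hashicorp/vault/sdk/helper/testcluster"
)

// waitForReplicationCaughtUp blocks until one active node plus the expected
// number of perf standbys are observed, as verified by the KV write/WAL
// check in WaitForActiveNodeAndPerfStandbys.
func waitForReplicationCaughtUp(cluster testcluster.VaultCluster) error {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()
	return testcluster.WaitForActiveNodeAndPerfStandbys(ctx, cluster)
}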
kind { - case GenerateRootRegular: - status, err = client.Sys().GenerateRootUpdate(strKey, status.Nonce) - case GenerateRootDR: - status, err = client.Sys().GenerateDROperationTokenUpdate(strKey, status.Nonce) - case GenerateRecovery: - status, err = client.Sys().GenerateRecoveryOperationTokenUpdate(strKey, status.Nonce) - } - if err != nil { - return "", err - } - } - if !status.Complete { - return "", fmt.Errorf("generate root operation did not end successfully") - } - - tokenBytes, err := base64.RawStdEncoding.DecodeString(status.EncodedToken) - if err != nil { - return "", err - } - tokenBytes, err = xor.XORBytes(tokenBytes, []byte(otp)) - if err != nil { - return "", err - } - return string(tokenBytes), nil -} diff --git a/sdk/helper/testhelpers/output.go b/sdk/helper/testhelpers/output.go deleted file mode 100644 index 769a63a1894986..00000000000000 --- a/sdk/helper/testhelpers/output.go +++ /dev/null @@ -1,81 +0,0 @@ -package testhelpers - -import ( - "crypto/sha256" - "fmt" - "reflect" - - "github.com/mitchellh/go-testing-interface" - "github.com/mitchellh/mapstructure" -) - -// ToMap renders an input value of any type as a map. This is intended for -// logging human-readable data dumps in test logs, so it uses the `json` -// tags on struct fields: this makes it easy to exclude `"-"` values that -// are typically not interesting, respect omitempty, etc. -// -// We also replace any []byte fields with a hash of their value. -// This is usually sufficient for test log purposes, and is a lot more readable -// than a big array of individual byte values like Go would normally stringify a -// byte slice. -func ToMap(in any) (map[string]any, error) { - temp := make(map[string]any) - cfg := &mapstructure.DecoderConfig{ - TagName: "json", - IgnoreUntaggedFields: true, - Result: &temp, - } - md, err := mapstructure.NewDecoder(cfg) - if err != nil { - return nil, err - } - err = md.Decode(in) - if err != nil { - return nil, err - } - - // mapstructure doesn't call the DecodeHook for each field when doing - // struct->map conversions, but it does for map->map, so call it a second - // time to convert each []byte field. - out := make(map[string]any) - md2, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - DecodeHook: func(from reflect.Type, to reflect.Type, data interface{}) (interface{}, error) { - if from.Kind() != reflect.Slice || from.Elem().Kind() != reflect.Uint8 { - return data, nil - } - b := data.([]byte) - return fmt.Sprintf("%x", sha256.Sum256(b)), nil - }, - }) - if err != nil { - return nil, err - } - err = md2.Decode(temp) - if err != nil { - return nil, err - } - - return out, nil -} - -// ToString renders its input using ToMap, and returns a string containing the -// result or an error if that fails. -func ToString(in any) string { - m, err := ToMap(in) - if err != nil { - return err.Error() - } - return fmt.Sprintf("%v", m) -} - -// StringOrDie renders its input using ToMap, and returns a string containing the -// result. If rendering yields an error, calls t.Fatal. 
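GenerateRoot, completed above, drives the whole generate-root workflow: init, submitting key shares, then base64/XOR-decoding the OTP-encoded token. A hedged usage sketch; the wrapper names are hypothetical, and the note about DR operation tokens reflects their usual role on DR secondaries rather than anything stated in this file.

package testcluster_example

import (
	"github.com/hashicorp/vault/sdk/helper/testcluster"
)

// refreshRootToken regenerates a regular root token and stores it on the
// cluster so later helpers that call GetRootToken pick it up.
func refreshRootToken(cluster testcluster.VaultCluster) error {
	token, err := testcluster.GenerateRoot(cluster, testcluster.GenerateRootRegular)
	if err != nil {
		return err
	}
	cluster.SetRootToken(token)
	return nil
}

// drOperationToken runs the same workflow in DR mode, producing the DR
// operation token typically used to authorize operations on a DR secondary.
func drOperationToken(cluster testcluster.VaultCluster) (string, error) {
	return testcluster.GenerateRoot(cluster, testcluster.GenerateRootDR)
}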
-func StringOrDie(t testing.T, in any) string { - t.Helper() - m, err := ToMap(in) - if err != nil { - t.Fatal(err) - } - return fmt.Sprintf("%v", m) -} diff --git a/sdk/helper/testhelpers/output_test.go b/sdk/helper/testhelpers/output_test.go deleted file mode 100644 index 257d9480942f68..00000000000000 --- a/sdk/helper/testhelpers/output_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package testhelpers - -import ( - "fmt" - "reflect" - "testing" -) - -func TestToMap(t *testing.T) { - type s struct { - A string `json:"a"` - B []byte `json:"b"` - C map[string]string `json:"c"` - D string `json:"-"` - } - type args struct { - in s - } - tests := []struct { - name string - args args - want string - wantErr bool - }{ - { - name: "basic", - args: args{s{A: "a", B: []byte("bytes"), C: map[string]string{"k": "v"}, D: "d"}}, - want: "map[a:a b:277089d91c0bdf4f2e6862ba7e4a07605119431f5d13f726dd352b06f1b206a9 c:map[k:v]]", - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - m, err := ToMap(&tt.args.in) - if (err != nil) != tt.wantErr { - t.Errorf("ToMap() error = %v, wantErr %v", err, tt.wantErr) - return - } - got := fmt.Sprintf("%s", m) - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("ToMap() got = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/sdk/helper/testhelpers/schema/response_validation.go b/sdk/helper/testhelpers/schema/response_validation.go index 430d1754a56f9b..2a2d6b3b5b3a80 100644 --- a/sdk/helper/testhelpers/schema/response_validation.go +++ b/sdk/helper/testhelpers/schema/response_validation.go @@ -1,24 +1,20 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package schema import ( "encoding/json" "fmt" "net/http" - "strings" "testing" "github.com/hashicorp/vault/sdk/framework" "github.com/hashicorp/vault/sdk/logical" ) -// ValidateResponse is a test helper that validates whether the given response -// object conforms to the response schema (schema.Fields). It cycles through -// the data map and validates conversions in the schema. In "strict" mode, this -// function will also ensure that the data map has all schema-required fields -// and does not have any fields outside of the schema. +// ValidateResponseData is a test helper that validates whether the given +// response data map conforms to the response schema (schema.Fields). It cycles +// through the data map and validates conversions in the schema. In "strict" +// mode, this function will also ensure that the data map has all schema's +// requred fields and does not have any fields outside of the schema. func ValidateResponse(t *testing.T, schema *framework.Response, response *logical.Response, strict bool) { t.Helper() @@ -29,11 +25,11 @@ func ValidateResponse(t *testing.T, schema *framework.Response, response *logica } } -// ValidateResponseData is a test helper that validates whether the given -// response data map conforms to the response schema (schema.Fields). It cycles -// through the data map and validates conversions in the schema. In "strict" -// mode, this function will also ensure that the data map has all schema's -// requred fields and does not have any fields outside of the schema. +// ValidateResponse is a test helper that validates whether the given response +// object conforms to the response schema (schema.Fields). It cycles through +// the data map and validates conversions in the schema. 
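Beyond the unit test above, the intended day-to-day use of the removed ToMap/StringOrDie helpers is dumping a struct into test logs with json-tagged fields only and []byte values hashed. A minimal sketch; the package name and loginResult type are hypothetical.

package testhelpers_example

import (
	"testing"

	"github.com/hashicorp/vault/sdk/helper/testhelpers"
)

type loginResult struct {
	Accessor string            `json:"accessor"`
	Secret   []byte            `json:"secret"`
	Metadata map[string]string `json:"metadata"`
	Raw      string            `json:"-"` // excluded from the dump
}

// TestDumpLoginResult logs a struct via StringOrDie: json tags control which
// fields appear, and the Secret bytes show up as a sha256 digest rather than
// the raw value.
func TestDumpLoginResult(t *testing.T) {
	res := loginResult{
		Accessor: "hmac-accessor",
		Secret:   []byte("s3cr3t"),
		Metadata: map[string]string{"org": "eng"},
	}
	t.Logf("login result: %s", testhelpers.StringOrDie(t, res))
}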
In "strict" mode, this +// function will also ensure that the data map has all schema-required fields +// and does not have any fields outside of the schema. func ValidateResponseData(t *testing.T, schema *framework.Response, data map[string]interface{}, strict bool) { t.Helper() @@ -53,16 +49,6 @@ func validateResponseDataImpl(schema *framework.Response, data map[string]interf return nil } - // Certain responses may come through with non-2xx status codes. While - // these are not always errors (e.g. 3xx redirection codes), we don't - // consider them for the purposes of schema validation - if status, exists := data[logical.HTTPStatusCode]; exists { - s, ok := status.(int) - if ok && (s < 200 || s > 299) { - return nil - } - } - // Marshal the data to JSON and back to convert the map's values into // JSON strings expected by Validate() and ValidateStrict(). This is // not efficient and is done for testing purposes only. @@ -79,24 +65,6 @@ func validateResponseDataImpl(schema *framework.Response, data map[string]interf return fmt.Errorf("failed to unmashal data: %w", err) } - // these are special fields that will not show up in the final response and - // should be ignored - for _, field := range []string{ - logical.HTTPContentType, - logical.HTTPRawBody, - logical.HTTPStatusCode, - logical.HTTPRawBodyAlreadyJSONDecoded, - logical.HTTPCacheControlHeader, - logical.HTTPPragmaHeader, - logical.HTTPWWWAuthenticateHeader, - } { - delete(dataWithStringValues, field) - - if _, ok := schema.Fields[field]; ok { - return fmt.Errorf("encountered a reserved field in response schema: %s", field) - } - } - // Validate fd := framework.FieldData{ Raw: dataWithStringValues, @@ -110,8 +78,7 @@ func validateResponseDataImpl(schema *framework.Response, data map[string]interf return fd.Validate() } -// FindResponseSchema is a test helper to extract response schema from the -// given framework path / operation. +// FindResponseSchema is a test helper to extract the response schema from a given framework path / operation func FindResponseSchema(t *testing.T, paths []*framework.Path, pathIdx int, operation logical.Operation) *framework.Response { t.Helper() @@ -159,44 +126,3 @@ func GetResponseSchema(t *testing.T, path *framework.Path, operation logical.Ope return &schemaResponses[0] } - -// ResponseValidatingCallback can be used in setting up a [vault.TestCluster] -// that validates every response against the openapi specifications. 
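FindResponseSchema and ValidateResponse are meant to be combined in backend unit tests: extract the declared response schema for a path/operation, invoke the handler, and check the returned data against that schema. The sketch below assumes a hypothetical one-path definition (configPaths) so it stays self-contained; the schema helpers and framework/logical types are the ones from the hunks above.

package schema_example

import (
	"context"
	"testing"

	"github.com/hashicorp/vault/sdk/framework"
	"github.com/hashicorp/vault/sdk/helper/testhelpers/schema"
	"github.com/hashicorp/vault/sdk/logical"
)

// configPaths is a hypothetical one-path backend definition; a real test
// would use the backend's own Paths.
func configPaths() []*framework.Path {
	return []*framework.Path{{
		Pattern: "config",
		Operations: map[logical.Operation]framework.OperationHandler{
			logical.ReadOperation: &framework.PathOperation{
				Callback: func(_ context.Context, _ *logical.Request, _ *framework.FieldData) (*logical.Response, error) {
					return &logical.Response{Data: map[string]interface{}{"name": "primary"}}, nil
				},
				Responses: map[int][]framework.Response{
					200: {{Fields: map[string]*framework.FieldSchema{
						"name": {Type: framework.TypeString, Required: true},
					}}},
				},
			},
		},
	}}
}

// TestConfigReadMatchesSchema invokes the read handler directly and checks
// the returned data against the declared response schema in strict mode.
func TestConfigReadMatchesSchema(t *testing.T) {
	paths := configPaths()
	handler := paths[0].Operations[logical.ReadOperation].Handler()

	resp, err := handler(context.Background(), &logical.Request{
		Operation: logical.ReadOperation,
		Storage:   &logical.InmemStorage{},
	}, &framework.FieldData{})
	if err != nil {
		t.Fatal(err)
	}

	schema.ValidateResponse(t,
		schema.FindResponseSchema(t, paths, 0, logical.ReadOperation),
		resp,
		true, // strict: no missing required fields, no extra fields
	)
}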
-// -// [vault.TestCluster]: https://pkg.go.dev/github.com/hashicorp/vault/vault#TestCluster -func ResponseValidatingCallback(t *testing.T) func(logical.Backend, *logical.Request, *logical.Response) { - type PathRouter interface { - Route(string) *framework.Path - } - - return func(b logical.Backend, req *logical.Request, resp *logical.Response) { - t.Helper() - - if b == nil { - t.Fatalf("non-nil backend required") - } - - backend, ok := b.(PathRouter) - if !ok { - t.Fatalf("could not cast %T to have `Route(string) *framework.Path`", b) - } - - // The full request path includes the backend but when passing to the - // backend, we have to trim the mount point: - // `sys/mounts/secret` -> `mounts/secret` - // `auth/token/create` -> `create` - requestPath := strings.TrimPrefix(req.Path, req.MountPoint) - - route := backend.Route(requestPath) - if route == nil { - t.Fatalf("backend %T could not find a route for %s", b, req.Path) - } - - ValidateResponse( - t, - GetResponseSchema(t, route, req.Operation), - resp, - true, - ) - } -} diff --git a/sdk/helper/testhelpers/schema/response_validation_test.go b/sdk/helper/testhelpers/schema/response_validation_test.go index 4f4aa8b1cc3c68..976389c444dae2 100644 --- a/sdk/helper/testhelpers/schema/response_validation_test.go +++ b/sdk/helper/testhelpers/schema/response_validation_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package schema import ( @@ -252,90 +249,6 @@ func TestValidateResponse(t *testing.T) { strict: false, errorExpected: false, }, - - "empty schema, response has http_raw_body, strict": { - schema: &framework.Response{ - Fields: map[string]*framework.FieldSchema{}, - }, - response: map[string]interface{}{ - "http_raw_body": "foo", - }, - strict: true, - errorExpected: false, - }, - - "empty schema, response has http_raw_body, not strict": { - schema: &framework.Response{ - Fields: map[string]*framework.FieldSchema{}, - }, - response: map[string]interface{}{ - "http_raw_body": "foo", - }, - strict: false, - errorExpected: false, - }, - - "string schema field, response has non-200 http_status_code, strict": { - schema: &framework.Response{ - Fields: map[string]*framework.FieldSchema{ - "foo": { - Type: framework.TypeString, - }, - }, - }, - response: map[string]interface{}{ - "http_status_code": 304, - }, - strict: true, - errorExpected: false, - }, - - "string schema field, response has non-200 http_status_code, not strict": { - schema: &framework.Response{ - Fields: map[string]*framework.FieldSchema{ - "foo": { - Type: framework.TypeString, - }, - }, - }, - response: map[string]interface{}{ - "http_status_code": 304, - }, - strict: false, - errorExpected: false, - }, - - "schema has http_raw_body, strict": { - schema: &framework.Response{ - Fields: map[string]*framework.FieldSchema{ - "http_raw_body": { - Type: framework.TypeString, - Required: false, - }, - }, - }, - response: map[string]interface{}{ - "http_raw_body": "foo", - }, - strict: true, - errorExpected: true, - }, - - "schema has http_raw_body, not strict": { - schema: &framework.Response{ - Fields: map[string]*framework.FieldSchema{ - "http_raw_body": { - Type: framework.TypeString, - Required: false, - }, - }, - }, - response: map[string]interface{}{ - "http_raw_body": "foo", - }, - strict: false, - errorExpected: true, - }, } for name, tc := range cases { diff --git a/sdk/helper/tlsutil/tlsutil.go b/sdk/helper/tlsutil/tlsutil.go index d91af3679e2e89..e1e9b9484bf4b6 100644 --- a/sdk/helper/tlsutil/tlsutil.go +++ 
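The path handling inside the removed ResponseValidatingCallback is worth calling out: the callback receives the full request path, but Backend.Route expects it relative to the mount point. A tiny sketch of that trimming, using the same sample paths as the removed comment; the function names here are hypothetical.

package schema_example

import (
	"fmt"
	"strings"
)

// relativePath strips the mount point so the remainder can be routed within
// the backend, mirroring the strings.TrimPrefix call in the removed callback.
func relativePath(requestPath, mountPoint string) string {
	return strings.TrimPrefix(requestPath, mountPoint)
}

func demoRelativePath() {
	fmt.Println(relativePath("sys/mounts/secret", "sys/"))        // mounts/secret
	fmt.Println(relativePath("auth/token/create", "auth/token/")) // create
}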
b/sdk/helper/tlsutil/tlsutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // DEPRECATED: this has been moved to go-secure-stdlib and will be removed package tlsutil diff --git a/sdk/helper/tokenutil/tokenutil.go b/sdk/helper/tokenutil/tokenutil.go index 4319bd18236988..776b40501ed457 100644 --- a/sdk/helper/tokenutil/tokenutil.go +++ b/sdk/helper/tokenutil/tokenutil.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package tokenutil import ( @@ -78,9 +75,8 @@ func TokenFields() map[string]*framework.FieldSchema { Type: framework.TypeCommaStringSlice, Description: `Comma separated string or JSON list of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.`, DisplayAttrs: &framework.DisplayAttributes{ - Name: "Generated Token's Bound CIDRs", - Group: "Tokens", - Description: "A list of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.", + Name: "Generated Token's Bound CIDRs", + Group: "Tokens", }, }, @@ -124,9 +120,8 @@ func TokenFields() map[string]*framework.FieldSchema { Type: framework.TypeCommaStringSlice, Description: "Comma-separated list of policies", DisplayAttrs: &framework.DisplayAttributes{ - Name: "Generated Token's Policies", - Group: "Tokens", - Description: "A list of policies that will apply to the generated token for this user.", + Name: "Generated Token's Policies", + Group: "Tokens", }, }, diff --git a/sdk/helper/useragent/useragent.go b/sdk/helper/useragent/useragent.go index 53569e910a87a2..0dc4b2c9e999ca 100644 --- a/sdk/helper/useragent/useragent.go +++ b/sdk/helper/useragent/useragent.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package useragent import ( diff --git a/sdk/helper/useragent/useragent_test.go b/sdk/helper/useragent/useragent_test.go index 4677bb62face3b..c21b0c9475c196 100644 --- a/sdk/helper/useragent/useragent_test.go +++ b/sdk/helper/useragent/useragent_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package useragent import ( diff --git a/sdk/helper/wrapping/wrapinfo.go b/sdk/helper/wrapping/wrapinfo.go index 03a703013008dd..8d8e63340f9592 100644 --- a/sdk/helper/wrapping/wrapinfo.go +++ b/sdk/helper/wrapping/wrapinfo.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package wrapping import "time" diff --git a/sdk/helper/xor/xor.go b/sdk/helper/xor/xor.go index 098a673178558c..a1f1e90bc156f8 100644 --- a/sdk/helper/xor/xor.go +++ b/sdk/helper/xor/xor.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package xor import ( diff --git a/sdk/helper/xor/xor_test.go b/sdk/helper/xor/xor_test.go index 143345d9a5bde7..f50f525ce6390d 100644 --- a/sdk/helper/xor/xor_test.go +++ b/sdk/helper/xor/xor_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package xor import ( diff --git a/sdk/logical/acme_billing.go b/sdk/logical/acme_billing.go deleted file mode 100644 index 6e4f6ef398b839..00000000000000 --- a/sdk/logical/acme_billing.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
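After the tokenutil change above, user-facing help text lives only in the field's Description, while DisplayAttrs carries just UI hints (Name and Group). A sketch of the resulting shape for a single field; the function name is hypothetical and the description text is paraphrased, not copied from the hunk.

package tokenutil_example

import (
	"github.com/hashicorp/vault/sdk/framework"
)

// boundCIDRsField mirrors the post-change shape of the token_bound_cidrs
// entry: Description for API help output, DisplayAttrs only for the UI.
func boundCIDRsField() *framework.FieldSchema {
	return &framework.FieldSchema{
		Type:        framework.TypeCommaStringSlice,
		Description: "Comma separated string or JSON list of CIDR blocks allowed to use the generated token.",
		DisplayAttrs: &framework.DisplayAttributes{
			Name:  "Generated Token's Bound CIDRs",
			Group: "Tokens",
		},
	}
}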
-// SPDX-License-Identifier: MPL-2.0 - -package logical - -import "context" - -type ACMEBillingSystemView interface { - CreateActivityCountEventForIdentifiers(ctx context.Context, identifiers []string) error -} diff --git a/sdk/logical/audit.go b/sdk/logical/audit.go index 30c03e6113ac23..8ba70f37e01a41 100644 --- a/sdk/logical/audit.go +++ b/sdk/logical/audit.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical type LogInput struct { diff --git a/sdk/logical/auth.go b/sdk/logical/auth.go index 83d9daca12adc8..51de20345fddd4 100644 --- a/sdk/logical/auth.go +++ b/sdk/logical/auth.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/connection.go b/sdk/logical/connection.go index e590e6f59acc1c..5be8630770794d 100644 --- a/sdk/logical/connection.go +++ b/sdk/logical/connection.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/controlgroup.go b/sdk/logical/controlgroup.go index e166f00d1f8845..2ed1b07688d9a1 100644 --- a/sdk/logical/controlgroup.go +++ b/sdk/logical/controlgroup.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/error.go b/sdk/logical/error.go index 5605784b3e1324..68c8e13732021b 100644 --- a/sdk/logical/error.go +++ b/sdk/logical/error.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import "errors" diff --git a/sdk/logical/event.pb.go b/sdk/logical/event.pb.go index 22e908d91a3438..1925c6ae9870f2 100644 --- a/sdk/logical/event.pb.go +++ b/sdk/logical/event.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/logical/event.proto b/sdk/logical/event.proto index 6e36e5e70f9ace..594bcf1dde09b8 100644 --- a/sdk/logical/event.proto +++ b/sdk/logical/event.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/sdk/logical"; diff --git a/sdk/logical/events.go b/sdk/logical/events.go index cbd3f736902448..e96e6d709005a0 100644 --- a/sdk/logical/events.go +++ b/sdk/logical/events.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/identity.pb.go b/sdk/logical/identity.pb.go index fedc5f5c202cbb..78fb704df0447b 100644 --- a/sdk/logical/identity.pb.go +++ b/sdk/logical/identity.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/logical/identity.proto b/sdk/logical/identity.proto index 4a1f3413750d3c..743f85e983f4b3 100644 --- a/sdk/logical/identity.proto +++ b/sdk/logical/identity.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/sdk/logical"; diff --git a/sdk/logical/lease.go b/sdk/logical/lease.go index e00fb52d64b170..97bbe4f6582bc8 100644 --- a/sdk/logical/lease.go +++ b/sdk/logical/lease.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/lease_test.go b/sdk/logical/lease_test.go index aee2bbdbcb3bf0..050b7db8e92bd2 100644 --- a/sdk/logical/lease_test.go +++ b/sdk/logical/lease_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/logical.go b/sdk/logical/logical.go index 51928d6e67e1fa..315d889295b60a 100644 --- a/sdk/logical/logical.go +++ b/sdk/logical/logical.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( @@ -166,6 +163,11 @@ type Auditor interface { AuditResponse(ctx context.Context, input *LogInput) error } +// Externaler allows us to check if a backend is running externally (i.e., over GRPC) +type Externaler interface { + IsExternal() bool +} + type PluginVersion struct { Version string } diff --git a/sdk/logical/logical_storage.go b/sdk/logical/logical_storage.go index b4fbc2b72fdb33..16b85cd797e0e9 100644 --- a/sdk/logical/logical_storage.go +++ b/sdk/logical/logical_storage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/managed_key.go b/sdk/logical/managed_key.go index 04727f9d7f424f..eaf4d107c89c72 100644 --- a/sdk/logical/managed_key.go +++ b/sdk/logical/managed_key.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/plugin.pb.go b/sdk/logical/plugin.pb.go index 19b18d89e1862c..5221478de6bc8b 100644 --- a/sdk/logical/plugin.pb.go +++ b/sdk/logical/plugin.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/logical/plugin.proto b/sdk/logical/plugin.proto index 0eaa3c57c8492a..f2df6c75d97c38 100644 --- a/sdk/logical/plugin.proto +++ b/sdk/logical/plugin.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; option go_package = "github.com/hashicorp/vault/sdk/logical"; diff --git a/sdk/logical/request.go b/sdk/logical/request.go index 8a6ac241fe8035..d774fd176b4a74 100644 --- a/sdk/logical/request.go +++ b/sdk/logical/request.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( @@ -156,22 +153,6 @@ type Request struct { // backends can be tied to the mount it belongs to. 
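The Externaler interface added to sdk/logical above is consumed by type assertion. A minimal sketch of a caller-side check; treating a backend that does not implement the interface as in-process is an assumption of this sketch, not something the hunk specifies.

package logical_example

import (
	"github.com/hashicorp/vault/sdk/logical"
)

// backendIsExternal reports whether a backend declares itself as running
// externally (over gRPC), falling back to false when the backend does not
// implement Externaler.
func backendIsExternal(b logical.Backend) bool {
	if ext, ok := b.(logical.Externaler); ok {
		return ext.IsExternal()
	}
	return false
}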
MountAccessor string `json:"mount_accessor" structs:"mount_accessor" mapstructure:"mount_accessor" sentinel:""` - // mountRunningVersion is used internally to propagate the semantic version - // of the mounted plugin as reported by its vault.MountEntry to audit logging - mountRunningVersion string - - // mountRunningSha256 is used internally to propagate the encoded sha256 - // of the mounted plugin as reported its vault.MountEntry to audit logging - mountRunningSha256 string - - // mountIsExternalPlugin is used internally to propagate whether - // the backend of the mounted plugin is running externally (i.e., over GRPC) - // to audit logging - mountIsExternalPlugin bool - - // mountClass is used internally to propagate the mount class of the mounted plugin to audit logging - mountClass string - // WrapInfo contains requested response wrapping parameters WrapInfo *RequestWrapInfo `json:"wrap_info" structs:"wrap_info" mapstructure:"wrap_info" sentinel:""` @@ -243,9 +224,6 @@ type Request struct { // InboundSSCToken is the token that arrives on an inbound request, supplied // by the vault user. InboundSSCToken string - - // When a request has been forwarded, contains information of the host the request was forwarded 'from' - ForwardedFrom string `json:"forwarded_from,omitempty"` } // Clone returns a deep copy of the request by using copystructure @@ -302,38 +280,6 @@ func (r *Request) SentinelKeys() []string { } } -func (r *Request) MountRunningVersion() string { - return r.mountRunningVersion -} - -func (r *Request) SetMountRunningVersion(mountRunningVersion string) { - r.mountRunningVersion = mountRunningVersion -} - -func (r *Request) MountRunningSha256() string { - return r.mountRunningSha256 -} - -func (r *Request) SetMountRunningSha256(mountRunningSha256 string) { - r.mountRunningSha256 = mountRunningSha256 -} - -func (r *Request) MountIsExternalPlugin() bool { - return r.mountIsExternalPlugin -} - -func (r *Request) SetMountIsExternalPlugin(mountIsExternalPlugin bool) { - r.mountIsExternalPlugin = mountIsExternalPlugin -} - -func (r *Request) MountClass() string { - return r.mountClass -} - -func (r *Request) SetMountClass(mountClass string) { - r.mountClass = mountClass -} - func (r *Request) LastRemoteWAL() uint64 { return r.lastRemoteWAL } @@ -420,7 +366,6 @@ const ( HelpOperation = "help" AliasLookaheadOperation = "alias-lookahead" ResolveRoleOperation = "resolve-role" - HeaderOperation = "header" // The operations below are called globally, the path is less relevant. RevokeOperation Operation = "revoke" diff --git a/sdk/logical/response.go b/sdk/logical/response.go index 9ea5bf6c572744..7e2a65406dd552 100644 --- a/sdk/logical/response.go +++ b/sdk/logical/response.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/response_util.go b/sdk/logical/response_util.go index aef0213ed9e82d..6d31a16ee7ed8d 100644 --- a/sdk/logical/response_util.go +++ b/sdk/logical/response_util.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package logical import ( @@ -20,7 +17,7 @@ import ( func RespondErrorCommon(req *Request, resp *Response, err error) (int, error) { if err == nil && (resp == nil || !resp.IsError()) { switch { - case req.Operation == ReadOperation || req.Operation == HeaderOperation: + case req.Operation == ReadOperation: if resp == nil { return http.StatusNotFound, nil } diff --git a/sdk/logical/response_util_test.go b/sdk/logical/response_util_test.go index eafaa2fc761099..00d70a5c4a2ea8 100644 --- a/sdk/logical/response_util_test.go +++ b/sdk/logical/response_util_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( @@ -42,14 +39,6 @@ func TestResponseUtil_RespondErrorCommon_basic(t *testing.T) { respErr: nil, expectedStatus: 404, }, - { - title: "Header not found", - req: &Request{ - Operation: HeaderOperation, - }, - respErr: nil, - expectedStatus: 404, - }, { title: "List with response and no keys", req: &Request{ diff --git a/sdk/logical/secret.go b/sdk/logical/secret.go index e6b4d14d4732bd..a2128d86899465 100644 --- a/sdk/logical/secret.go +++ b/sdk/logical/secret.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import "fmt" diff --git a/sdk/logical/storage.go b/sdk/logical/storage.go index 16ba60b94875ce..062c3adb59af74 100644 --- a/sdk/logical/storage.go +++ b/sdk/logical/storage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/storage_inmem.go b/sdk/logical/storage_inmem.go index 62ec58290a4b65..65368a070fe452 100644 --- a/sdk/logical/storage_inmem.go +++ b/sdk/logical/storage_inmem.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/storage_inmem_test.go b/sdk/logical/storage_inmem_test.go index 2ed776b20c38c9..8e0964fd4af8f0 100644 --- a/sdk/logical/storage_inmem_test.go +++ b/sdk/logical/storage_inmem_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/storage_test.go b/sdk/logical/storage_test.go index 1d6014dd9769cb..3b96b4dbef3406 100644 --- a/sdk/logical/storage_test.go +++ b/sdk/logical/storage_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/storage_view.go b/sdk/logical/storage_view.go index df40dca4fc53a4..2cd07715c2ae47 100644 --- a/sdk/logical/storage_view.go +++ b/sdk/logical/storage_view.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/system_view.go b/sdk/logical/system_view.go index 7301c752a16253..6a70e06c879dff 100644 --- a/sdk/logical/system_view.go +++ b/sdk/logical/system_view.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/testing.go b/sdk/logical/testing.go index a173c7c5f7b29f..8cb41e2e7c5800 100644 --- a/sdk/logical/testing.go +++ b/sdk/logical/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
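The RespondErrorCommon branch changed above means a read that completes without error but yields no response maps to 404 Not Found. A small sketch of that behavior; the wrapper name is hypothetical.

package logical_example

import (
	"fmt"

	"github.com/hashicorp/vault/sdk/logical"
)

// nilReadIs404 demonstrates the changed branch: a nil response with a nil
// error on a read operation is reported as 404.
func nilReadIs404() {
	status, err := logical.RespondErrorCommon(
		&logical.Request{Operation: logical.ReadOperation},
		nil, // no response
		nil, // no error
	)
	fmt.Println(status, err) // 404 <nil>
}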
-// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/token.go b/sdk/logical/token.go index a27a73a22dc27d..ebebd4ad9ca7a3 100644 --- a/sdk/logical/token.go +++ b/sdk/logical/token.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/token_test.go b/sdk/logical/token_test.go index 641d688b9dd1b0..e44c707a516597 100644 --- a/sdk/logical/token_test.go +++ b/sdk/logical/token_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/translate_response.go b/sdk/logical/translate_response.go index ef5ba5f220724a..de5ea8fdbe2149 100644 --- a/sdk/logical/translate_response.go +++ b/sdk/logical/translate_response.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package logical import ( diff --git a/sdk/logical/version.pb.go b/sdk/logical/version.pb.go index 9962824cbb15d3..613e80e24e11ca 100644 --- a/sdk/logical/version.pb.go +++ b/sdk/logical/version.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/logical/version.proto b/sdk/logical/version.proto index 860ddc54e27076..345051ae9de952 100644 --- a/sdk/logical/version.proto +++ b/sdk/logical/version.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; package logical; diff --git a/sdk/physical/cache.go b/sdk/physical/cache.go index cc318a4c0eb528..af40f538595742 100644 --- a/sdk/physical/cache.go +++ b/sdk/physical/cache.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/encoding.go b/sdk/physical/encoding.go index 49e00ae6ace588..dbde84cc6dc467 100644 --- a/sdk/physical/encoding.go +++ b/sdk/physical/encoding.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/entry.go b/sdk/physical/entry.go index 1d907425dc08d6..389fe6c81c1443 100644 --- a/sdk/physical/entry.go +++ b/sdk/physical/entry.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/error.go b/sdk/physical/error.go index 4af7b7d639fcc8..b547e4e4288d43 100644 --- a/sdk/physical/error.go +++ b/sdk/physical/error.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/file/file.go b/sdk/physical/file/file.go index d7ad9de3a2b091..e5e64e6efa41f6 100644 --- a/sdk/physical/file/file.go +++ b/sdk/physical/file/file.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package file import ( diff --git a/sdk/physical/file/file_test.go b/sdk/physical/file/file_test.go index 14c33094dec483..724b8a012a66bd 100644 --- a/sdk/physical/file/file_test.go +++ b/sdk/physical/file/file_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package file import ( diff --git a/sdk/physical/inmem/cache_test.go b/sdk/physical/inmem/cache_test.go index 3014fc1768da9d..e6e6dabfe37609 100644 --- a/sdk/physical/inmem/cache_test.go +++ b/sdk/physical/inmem/cache_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/inmem/inmem.go b/sdk/physical/inmem/inmem.go index e4fa1f69ba2310..be16b4caa12fc6 100644 --- a/sdk/physical/inmem/inmem.go +++ b/sdk/physical/inmem/inmem.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/inmem/inmem_ha.go b/sdk/physical/inmem/inmem_ha.go index 1db26ca7461f3a..64fcb3a66dce39 100644 --- a/sdk/physical/inmem/inmem_ha.go +++ b/sdk/physical/inmem/inmem_ha.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/inmem/inmem_ha_test.go b/sdk/physical/inmem/inmem_ha_test.go index bb427a385e991d..850d63a230be1e 100644 --- a/sdk/physical/inmem/inmem_ha_test.go +++ b/sdk/physical/inmem/inmem_ha_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/inmem/inmem_test.go b/sdk/physical/inmem/inmem_test.go index 56c029a43303b9..678061326a218a 100644 --- a/sdk/physical/inmem/inmem_test.go +++ b/sdk/physical/inmem/inmem_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/inmem/physical_view_test.go b/sdk/physical/inmem/physical_view_test.go index 24b47d7ae7ec9e..ea4a3ce24f1877 100644 --- a/sdk/physical/inmem/physical_view_test.go +++ b/sdk/physical/inmem/physical_view_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/inmem/transactions_test.go b/sdk/physical/inmem/transactions_test.go index 71a4829f96647f..7ed3d59492412b 100644 --- a/sdk/physical/inmem/transactions_test.go +++ b/sdk/physical/inmem/transactions_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package inmem import ( diff --git a/sdk/physical/latency.go b/sdk/physical/latency.go index f4cced5270b19e..c00780598cf479 100644 --- a/sdk/physical/latency.go +++ b/sdk/physical/latency.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/physical.go b/sdk/physical/physical.go index c0e7d2ef8895cf..808abd50fcd853 100644 --- a/sdk/physical/physical.go +++ b/sdk/physical/physical.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/physical_access.go b/sdk/physical/physical_access.go index 048ee83856670f..7497313afca22b 100644 --- a/sdk/physical/physical_access.go +++ b/sdk/physical/physical_access.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/physical_view.go b/sdk/physical/physical_view.go index 0369e13778a049..189ac93172a53c 100644 --- a/sdk/physical/physical_view.go +++ b/sdk/physical/physical_view.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/testing.go b/sdk/physical/testing.go index 0c6a021d3d237a..6e0ddfcc0eae0a 100644 --- a/sdk/physical/testing.go +++ b/sdk/physical/testing.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/physical/transactions.go b/sdk/physical/transactions.go index 8d4e33321e2cba..a943c6bd95efa4 100644 --- a/sdk/physical/transactions.go +++ b/sdk/physical/transactions.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package physical import ( diff --git a/sdk/plugin/backend.go b/sdk/plugin/backend.go index 2da1378eaa91a9..46e3710fdbaced 100644 --- a/sdk/plugin/backend.go +++ b/sdk/plugin/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_backend.go b/sdk/plugin/grpc_backend.go index f0114b94657908..a65eeebeb43229 100644 --- a/sdk/plugin/grpc_backend.go +++ b/sdk/plugin/grpc_backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_backend_client.go b/sdk/plugin/grpc_backend_client.go index a343356d19d865..5d2d52812b30d2 100644 --- a/sdk/plugin/grpc_backend_client.go +++ b/sdk/plugin/grpc_backend_client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_backend_server.go b/sdk/plugin/grpc_backend_server.go index 3356e463c8ad87..267e84c965c147 100644 --- a/sdk/plugin/grpc_backend_server.go +++ b/sdk/plugin/grpc_backend_server.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_backend_test.go b/sdk/plugin/grpc_backend_test.go index 01a6ea609f7d68..2f665beb044ab0 100644 --- a/sdk/plugin/grpc_backend_test.go +++ b/sdk/plugin/grpc_backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_events.go b/sdk/plugin/grpc_events.go index 05d788c66cd0a6..d1d9fc02634af5 100644 --- a/sdk/plugin/grpc_events.go +++ b/sdk/plugin/grpc_events.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_storage.go b/sdk/plugin/grpc_storage.go index 5c2f0de3f4f058..6a04b3a976871f 100644 --- a/sdk/plugin/grpc_storage.go +++ b/sdk/plugin/grpc_storage.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_system.go b/sdk/plugin/grpc_system.go index bf4537bd58f74b..ede025583c2d49 100644 --- a/sdk/plugin/grpc_system.go +++ b/sdk/plugin/grpc_system.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/grpc_system_test.go b/sdk/plugin/grpc_system_test.go index 19a5ecbaaeea00..7a282608ab3947 100644 --- a/sdk/plugin/grpc_system_test.go +++ b/sdk/plugin/grpc_system_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/logger.go b/sdk/plugin/logger.go index 1ef4694e9b8f4a..ecf6ed01f158ad 100644 --- a/sdk/plugin/logger.go +++ b/sdk/plugin/logger.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package plugin import hclog "github.com/hashicorp/go-hclog" diff --git a/sdk/plugin/logger_test.go b/sdk/plugin/logger_test.go index c47a70b1c579b3..a2b8a80155cdeb 100644 --- a/sdk/plugin/logger_test.go +++ b/sdk/plugin/logger_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/middleware.go b/sdk/plugin/middleware.go index 4411c788297b4f..546584ccc73672 100644 --- a/sdk/plugin/middleware.go +++ b/sdk/plugin/middleware.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/mock/backend.go b/sdk/plugin/mock/backend.go index 9b3aa2c851e29a..a75b639ef65363 100644 --- a/sdk/plugin/mock/backend.go +++ b/sdk/plugin/mock/backend.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/mock/backend_test.go b/sdk/plugin/mock/backend_test.go index 640eec11643dfb..15860906d4a726 100644 --- a/sdk/plugin/mock/backend_test.go +++ b/sdk/plugin/mock/backend_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/mock/path_errors.go b/sdk/plugin/mock/path_errors.go index f5e5b124fcb224..05ef474a7eafc6 100644 --- a/sdk/plugin/mock/path_errors.go +++ b/sdk/plugin/mock/path_errors.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/mock/path_internal.go b/sdk/plugin/mock/path_internal.go index 30c2926f5cfad3..26ede270fac216 100644 --- a/sdk/plugin/mock/path_internal.go +++ b/sdk/plugin/mock/path_internal.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/mock/path_kv.go b/sdk/plugin/mock/path_kv.go index fd8080572cd421..1946b576249645 100644 --- a/sdk/plugin/mock/path_kv.go +++ b/sdk/plugin/mock/path_kv.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/mock/path_raw.go b/sdk/plugin/mock/path_raw.go index 2a4b77fb731a25..55cb7c9374087c 100644 --- a/sdk/plugin/mock/path_raw.go +++ b/sdk/plugin/mock/path_raw.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/mock/path_special.go b/sdk/plugin/mock/path_special.go index 4223f91053dc04..22afa41c6daed4 100644 --- a/sdk/plugin/mock/path_special.go +++ b/sdk/plugin/mock/path_special.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package mock import ( diff --git a/sdk/plugin/pb/backend.pb.go b/sdk/plugin/pb/backend.pb.go index 82bbae2fd230fe..b59c2f25ec4641 100644 --- a/sdk/plugin/pb/backend.pb.go +++ b/sdk/plugin/pb/backend.pb.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 diff --git a/sdk/plugin/pb/backend.proto b/sdk/plugin/pb/backend.proto index ded407788a72c6..80a3647bafab2b 100644 --- a/sdk/plugin/pb/backend.proto +++ b/sdk/plugin/pb/backend.proto @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - syntax = "proto3"; package pb; diff --git a/sdk/plugin/pb/translation.go b/sdk/plugin/pb/translation.go index 92ca9af2428550..70bd35a16205df 100644 --- a/sdk/plugin/pb/translation.go +++ b/sdk/plugin/pb/translation.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pb import ( diff --git a/sdk/plugin/pb/translation_test.go b/sdk/plugin/pb/translation_test.go index 30979257acc145..83cad401e5534b 100644 --- a/sdk/plugin/pb/translation_test.go +++ b/sdk/plugin/pb/translation_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package pb import ( diff --git a/sdk/plugin/plugin.go b/sdk/plugin/plugin.go index ec58417ec7d228..edbffcd6983ac7 100644 --- a/sdk/plugin/plugin.go +++ b/sdk/plugin/plugin.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( @@ -154,4 +151,14 @@ func (b *BackendPluginClient) PluginVersion() logical.PluginVersion { return logical.EmptyPluginVersion } -var _ logical.PluginVersioner = (*BackendPluginClient)(nil) +func (b *BackendPluginClient) IsExternal() bool { + if externaler, ok := b.Backend.(logical.Externaler); ok { + return externaler.IsExternal() + } + return true // default to true since this is only used for GRPC plugins +} + +var ( + _ logical.PluginVersioner = (*BackendPluginClient)(nil) + _ logical.Externaler = (*BackendPluginClient)(nil) +) diff --git a/sdk/plugin/plugin_v5.go b/sdk/plugin/plugin_v5.go index cc2d1383775d4e..2adf020a48eee4 100644 --- a/sdk/plugin/plugin_v5.go +++ b/sdk/plugin/plugin_v5.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( @@ -55,7 +52,10 @@ func (b *BackendPluginClientV5) PluginVersion() logical.PluginVersion { return logical.EmptyPluginVersion } -var _ logical.PluginVersioner = (*BackendPluginClientV5)(nil) +var ( + _ logical.PluginVersioner = (*BackendPluginClientV5)(nil) + _ logical.Externaler = (*BackendPluginClientV5)(nil) +) // NewBackendV5 will return an instance of an RPC-based client implementation of // the backend for external plugins, or a concrete implementation of the diff --git a/sdk/plugin/serve.go b/sdk/plugin/serve.go index 9ad2b820bb7ed4..0da143f769b8b7 100644 --- a/sdk/plugin/serve.go +++ b/sdk/plugin/serve.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/plugin/storage_test.go b/sdk/plugin/storage_test.go index 61a5deec6720d6..651d3619904284 100644 --- a/sdk/plugin/storage_test.go +++ b/sdk/plugin/storage_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package plugin import ( diff --git a/sdk/queue/priority_queue.go b/sdk/queue/priority_queue.go index 802a538587b303..3994841773e650 100644 --- a/sdk/queue/priority_queue.go +++ b/sdk/queue/priority_queue.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - // Package queue provides Vault plugins with a Priority Queue. It can be used // as an in-memory list of queue.Item sorted by their priority, and offers // methods to find or remove items by their key. Internally it uses diff --git a/sdk/queue/priority_queue_test.go b/sdk/queue/priority_queue_test.go index 108a26cc0edcfa..928442b5246b8d 100644 --- a/sdk/queue/priority_queue_test.go +++ b/sdk/queue/priority_queue_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
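The plugin.go hunk above adds both an IsExternal implementation and a compile-time `var _ logical.Externaler = ...` assertion. The same idiom can be applied to any backend type so a missing method becomes a build error; myBackend below is a hypothetical stand-in used only to show the pattern.

package plugin_example

import (
	"github.com/hashicorp/vault/sdk/logical"
)

// myBackend illustrates the assertion idiom used for BackendPluginClient: if
// the type ever stops satisfying logical.Externaler, compilation fails here.
type myBackend struct{}

// IsExternal reports that this backend runs in-process rather than over gRPC.
func (b *myBackend) IsExternal() bool { return false }

var _ logical.Externaler = (*myBackend)(nil)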
-// SPDX-License-Identifier: MPL-2.0 - package queue import ( diff --git a/serviceregistration/consul/consul_service_registration.go b/serviceregistration/consul/consul_service_registration.go index a49ab4ff63f76b..79008967a56949 100644 --- a/serviceregistration/consul/consul_service_registration.go +++ b/serviceregistration/consul/consul_service_registration.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/serviceregistration/consul/consul_service_registration_test.go b/serviceregistration/consul/consul_service_registration_test.go index 0ced651e0242fb..21b2b2573b5330 100644 --- a/serviceregistration/consul/consul_service_registration_test.go +++ b/serviceregistration/consul/consul_service_registration_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package consul import ( diff --git a/serviceregistration/kubernetes/client/client.go b/serviceregistration/kubernetes/client/client.go index 96d19527245306..934d3bad908cde 100644 --- a/serviceregistration/kubernetes/client/client.go +++ b/serviceregistration/kubernetes/client/client.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package client import ( diff --git a/serviceregistration/kubernetes/client/client_test.go b/serviceregistration/kubernetes/client/client_test.go index de11dad37e2303..9f0dfad6e9a4b6 100644 --- a/serviceregistration/kubernetes/client/client_test.go +++ b/serviceregistration/kubernetes/client/client_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package client import ( diff --git a/serviceregistration/kubernetes/client/cmd/kubeclient/main.go b/serviceregistration/kubernetes/client/cmd/kubeclient/main.go index 7060a063e2d7a5..9eb031a362c696 100644 --- a/serviceregistration/kubernetes/client/cmd/kubeclient/main.go +++ b/serviceregistration/kubernetes/client/cmd/kubeclient/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main // This code builds a minimal binary of the lightweight kubernetes diff --git a/serviceregistration/kubernetes/client/config.go b/serviceregistration/kubernetes/client/config.go index be98240e219508..4e6a0f45848c6a 100644 --- a/serviceregistration/kubernetes/client/config.go +++ b/serviceregistration/kubernetes/client/config.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package client import ( diff --git a/serviceregistration/kubernetes/retry_handler.go b/serviceregistration/kubernetes/retry_handler.go index 46ac18eafdddcb..68afa8cdc576d3 100644 --- a/serviceregistration/kubernetes/retry_handler.go +++ b/serviceregistration/kubernetes/retry_handler.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( diff --git a/serviceregistration/kubernetes/retry_handler_test.go b/serviceregistration/kubernetes/retry_handler_test.go index 0dd61113923e4b..e2be809d0a0956 100644 --- a/serviceregistration/kubernetes/retry_handler_test.go +++ b/serviceregistration/kubernetes/retry_handler_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( diff --git a/serviceregistration/kubernetes/service_registration.go b/serviceregistration/kubernetes/service_registration.go index f377cbb9874eb7..f1c9a3c8ce4015 100644 --- a/serviceregistration/kubernetes/service_registration.go +++ b/serviceregistration/kubernetes/service_registration.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( diff --git a/serviceregistration/kubernetes/service_registration_test.go b/serviceregistration/kubernetes/service_registration_test.go index a6a93c9ceeeed9..a1bf001f16424f 100644 --- a/serviceregistration/kubernetes/service_registration_test.go +++ b/serviceregistration/kubernetes/service_registration_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package kubernetes import ( diff --git a/serviceregistration/kubernetes/testing/testserver.go b/serviceregistration/kubernetes/testing/testserver.go index 6ceb94018625b0..4f406eb6871bc7 100644 --- a/serviceregistration/kubernetes/testing/testserver.go +++ b/serviceregistration/kubernetes/testing/testserver.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package testing import ( diff --git a/serviceregistration/service_registration.go b/serviceregistration/service_registration.go index 79f5b20d18839d..5dc67af2bee105 100644 --- a/serviceregistration/service_registration.go +++ b/serviceregistration/service_registration.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package serviceregistration /* diff --git a/shamir/shamir.go b/shamir/shamir.go index d9c0271a709854..22e3c337dcb147 100644 --- a/shamir/shamir.go +++ b/shamir/shamir.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package shamir import ( diff --git a/shamir/shamir_test.go b/shamir/shamir_test.go index 940a34ecf167f7..90a7c371c2942c 100644 --- a/shamir/shamir_test.go +++ b/shamir/shamir_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package shamir import ( diff --git a/tools/godoctests/main.go b/tools/godoctests/main.go index caa6ca0b93709a..3eba556704a11c 100644 --- a/tools/godoctests/main.go +++ b/tools/godoctests/main.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package main import ( diff --git a/tools/godoctests/pkg/analyzer/analyzer.go b/tools/godoctests/pkg/analyzer/analyzer.go index 38ed37d933a6db..8903f5ea5fb162 100644 --- a/tools/godoctests/pkg/analyzer/analyzer.go +++ b/tools/godoctests/pkg/analyzer/analyzer.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package analyzer import ( diff --git a/tools/godoctests/pkg/analyzer/analyzer_test.go b/tools/godoctests/pkg/analyzer/analyzer_test.go index df1bfafd463244..d7c80f2df7f515 100644 --- a/tools/godoctests/pkg/analyzer/analyzer_test.go +++ b/tools/godoctests/pkg/analyzer/analyzer_test.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - package analyzer import ( diff --git a/tools/godoctests/pkg/analyzer/testdata/funcs.go b/tools/godoctests/pkg/analyzer/testdata/funcs.go index e9d5fead6744cc..e6f7deab17e18d 100644 --- a/tools/godoctests/pkg/analyzer/testdata/funcs.go +++ b/tools/godoctests/pkg/analyzer/testdata/funcs.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - package testdata import "testing" diff --git a/tools/gonilnilfunctions/main.go b/tools/gonilnilfunctions/main.go deleted file mode 100644 index 68fd796a9f8dd0..00000000000000 --- a/tools/gonilnilfunctions/main.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package main - -import ( - "github.com/hashicorp/vault/tools/gonilnilfunctions/pkg/analyzer" - "golang.org/x/tools/go/analysis/singlechecker" -) - -func main() { - singlechecker.Main(analyzer.Analyzer) -} diff --git a/tools/gonilnilfunctions/pkg/analyzer/analyzer.go b/tools/gonilnilfunctions/pkg/analyzer/analyzer.go deleted file mode 100644 index 2b9f17a6e8f396..00000000000000 --- a/tools/gonilnilfunctions/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package analyzer - -import ( - "go/ast" - "go/types" - "reflect" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var Analyzer = &analysis.Analyzer{ - Name: "gonilnilfunctions", - Doc: "Verifies that every go function with error as one of its two return types cannot return nil, nil", - Run: run, - ResultType: reflect.TypeOf((interface{})(nil)), - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -// getNestedReturnStatements searches the AST for return statements, and returns -// them in a tail-call optimized list. -func getNestedReturnStatements(s ast.Stmt, returns []*ast.ReturnStmt) []*ast.ReturnStmt { - switch s := s.(type) { - case *ast.BlockStmt: - statements := make([]*ast.ReturnStmt, 0) - for _, stmt := range s.List { - statements = append(statements, getNestedReturnStatements(stmt, make([]*ast.ReturnStmt, 0))...) - } - - return append(returns, statements...) - case *ast.BranchStmt: - return returns - case *ast.ForStmt: - return getNestedReturnStatements(s.Body, returns) - case *ast.IfStmt: - return getNestedReturnStatements(s.Body, returns) - case *ast.LabeledStmt: - return getNestedReturnStatements(s.Stmt, returns) - case *ast.RangeStmt: - return getNestedReturnStatements(s.Body, returns) - case *ast.ReturnStmt: - return append(returns, s) - case *ast.SwitchStmt: - return getNestedReturnStatements(s.Body, returns) - case *ast.SelectStmt: - return getNestedReturnStatements(s.Body, returns) - case *ast.TypeSwitchStmt: - return getNestedReturnStatements(s.Body, returns) - case *ast.CommClause: - statements := make([]*ast.ReturnStmt, 0) - for _, stmt := range s.Body { - statements = append(statements, getNestedReturnStatements(stmt, make([]*ast.ReturnStmt, 0))...) - } - - return append(returns, statements...) - case *ast.CaseClause: - statements := make([]*ast.ReturnStmt, 0) - for _, stmt := range s.Body { - statements = append(statements, getNestedReturnStatements(stmt, make([]*ast.ReturnStmt, 0))...) - } - - return append(returns, statements...) - case *ast.ExprStmt: - return returns - } - return returns -} - -// run runs the analysis, failing for functions whose signatures contain two results including one error -// (e.g. 
(something, error)), that contain multiple nil returns -func run(pass *analysis.Pass) (interface{}, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - - inspector.Preorder(nodeFilter, func(node ast.Node) { - funcDecl, ok := node.(*ast.FuncDecl) - if !ok { - return - } - - // If the function has the "Ignore" godoc comment, skip it - if strings.Contains(funcDecl.Doc.Text(), "ignore-nil-nil-function-check") { - return - } - - // The function returns something - if funcDecl == nil || funcDecl.Type == nil || funcDecl.Type.Results == nil { - return - } - - // The function has more than 1 return value - results := funcDecl.Type.Results.List - if len(results) < 2 { - return - } - - // isError is a helper function to check if a Field is of error type - isError := func(field *ast.Field) bool { - if named, ok := pass.TypesInfo.TypeOf(field.Type).(*types.Named); ok { - namedObject := named.Obj() - return namedObject != nil && namedObject.Pkg() == nil && namedObject.Name() == "error" - } - return false - } - - // one of the return values is error - var errorFound bool - for _, result := range results { - if isError(result) { - errorFound = true - break - } - } - - if !errorFound { - return - } - - // Since these statements might be e.g. blocks with - // other statements inside, we need to get the return statements - // from inside them, first. - statements := funcDecl.Body.List - - returnStatements := make([]*ast.ReturnStmt, 0) - for _, statement := range statements { - returnStatements = append(returnStatements, getNestedReturnStatements(statement, make([]*ast.ReturnStmt, 0))...) - } - - for _, returnStatement := range returnStatements { - numResultsNil := 0 - results := returnStatement.Results - - // We only want two-arg functions (something, nil) - // We can remove this block in the future if we change our mind - if len(results) != 2 { - continue - } - - for _, result := range results { - // nil is an ident - ident, isIdent := result.(*ast.Ident) - if isIdent { - if ident.Name == "nil" { - // We found one nil in the return list - numResultsNil++ - } - } - } - // We found N nils, and our function returns N results, so this fails the check - if numResultsNil == len(results) { - // All the return values are nil, so we fail the report - pass.Reportf(node.Pos(), "Function %s can return an error, and has a statement that returns only nils", - funcDecl.Name.Name) - - // We break out of the loop of checking return statements, so that we don't repeat ourselves - break - } - } - }) - - var success interface{} - return success, nil -} diff --git a/tools/gonilnilfunctions/pkg/analyzer/analyzer_test.go b/tools/gonilnilfunctions/pkg/analyzer/analyzer_test.go deleted file mode 100644 index df1bfafd463244..00000000000000 --- a/tools/gonilnilfunctions/pkg/analyzer/analyzer_test.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package analyzer - -import ( - "os" - "path/filepath" - "testing" - - "golang.org/x/tools/go/analysis/analysistest" -) - -// TestAnalyzer runs the analyzer on the test functions in testdata/funcs.go. The report from the analyzer is compared against -// the comments in funcs.go beginning with "want." If there is no comment beginning with "want", then the analyzer is expected -// not to report anything. 
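The analyzer deleted above reports two-result (value, error) functions that contain a return statement whose results are all nil. A short illustration of the pattern it flagged and one conventional fix, returning a sentinel error instead of nil, nil; the names below are illustrative only and not taken from Vault.

    package main

    import (
        "errors"
        "fmt"
    )

    var errNotFound = errors.New("record not found")

    // lookupBad is the shape the deleted check reported: both results are nil,
    // so callers cannot distinguish "no value" from "no error".
    func lookupBad(key string) (*string, error) {
        return nil, nil
    }

    // lookupGood returns an explicit sentinel error instead of nil, nil,
    // keeping the (value, error) contract unambiguous.
    func lookupGood(key string) (*string, error) {
        return nil, errNotFound
    }

    func main() {
        if _, err := lookupGood("missing"); errors.Is(err, errNotFound) {
            fmt.Println("caller can branch on the sentinel error")
        }
        _ = lookupBad // kept only to show the flagged shape
    }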
-func TestAnalyzer(t *testing.T) { - f, err := os.Getwd() - if err != nil { - t.Fatal("failed to get working directory", err) - } - analysistest.Run(t, filepath.Join(f, "testdata"), Analyzer, ".") -} diff --git a/tools/gonilnilfunctions/pkg/analyzer/testdata/funcs.go b/tools/gonilnilfunctions/pkg/analyzer/testdata/funcs.go deleted file mode 100644 index f783f01219b126..00000000000000 --- a/tools/gonilnilfunctions/pkg/analyzer/testdata/funcs.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package testdata - -func ReturnReturnOkay() (any, error) { - var i interface{} - return i, nil -} - -func OneGoodOneBad() (any, error) { // want "Function OneGoodOneBad can return an error, and has a statement that returns only nils" - var i interface{} - if true { - return i, nil - } - return nil, nil -} - -func OneBadOneGood() (any, error) { // want "Function OneBadOneGood can return an error, and has a statement that returns only nils" - var i interface{} - if true { - return nil, nil - } - return i, nil -} - -func EmptyFunc() {} - -func TwoNilNils() (any, error) { // want "Function TwoNilNils can return an error, and has a statement that returns only nils" - if true { - return nil, nil - } - return nil, nil -} - -// ThreeResults should not fail, as while it returns nil, nil, nil, it has three results, not two. -func ThreeResults() (any, any, error) { - return nil, nil, nil -} - -func TwoArgsNoError() (any, any) { - return nil, nil -} - -func NestedReturn() (any, error) { // want "Function NestedReturn can return an error, and has a statement that returns only nils" - { - { - { - return nil, nil - } - } - } -} - -func NestedForReturn() (any, error) { // want "Function NestedForReturn can return an error, and has a statement that returns only nils" - for { - for i := 0; i < 100; i++ { - { - return nil, nil - } - } - } -} - -func AnyErrorNilNil() (any, error) { // want "Function AnyErrorNilNil can return an error, and has a statement that returns only nils" - return nil, nil -} - -// Skipped should be skipped because of the following line: -// ignore-nil-nil-function-check -func Skipped() (any, error) { - return nil, nil -} diff --git a/tools/semgrep/ci/atomic.yml b/tools/semgrep/ci/atomic.yml index 1d6b2a9da79952..2fea38bd77b4f6 100644 --- a/tools/semgrep/ci/atomic.yml +++ b/tools/semgrep/ci/atomic.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: atomics-64bit-safety patterns: diff --git a/tools/semgrep/ci/bad-multierror-append.yml b/tools/semgrep/ci/bad-multierror-append.yml index bebb20013f396c..86b637577d9de5 100644 --- a/tools/semgrep/ci/bad-multierror-append.yml +++ b/tools/semgrep/ci/bad-multierror-append.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: bad-multierror-append patterns: diff --git a/tools/semgrep/ci/bad-nil-guard.yml b/tools/semgrep/ci/bad-nil-guard.yml index 70003690f72feb..f5fd122e8c965e 100644 --- a/tools/semgrep/ci/bad-nil-guard.yml +++ b/tools/semgrep/ci/bad-nil-guard.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: bad-nil-guard patterns: diff --git a/tools/semgrep/ci/error-shadowing.yml b/tools/semgrep/ci/error-shadowing.yml index 43ea1a3eb9fd0b..8362df5f7a7ec4 100644 --- a/tools/semgrep/ci/error-shadowing.yml +++ b/tools/semgrep/ci/error-shadowing.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
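The atomics-64bit-safety rule touched above relates to sync/atomic's documented requirement that atomically accessed 64-bit values be 64-bit aligned; on 32-bit platforms the usual convention is to keep such fields first in their struct. A small sketch of that layout convention follows; the counters struct is hypothetical.

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    // counters keeps its atomically accessed 64-bit field first so it is
    // guaranteed 64-bit aligned even on 32-bit platforms, per the alignment
    // note in the sync/atomic documentation.
    type counters struct {
        requests uint64 // accessed with atomic.AddUint64 / LoadUint64; keep first
        name     string
    }

    func main() {
        c := &counters{name: "demo"}
        atomic.AddUint64(&c.requests, 1)
        fmt.Println(atomic.LoadUint64(&c.requests))
    }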
-# SPDX-License-Identifier: MPL-2.0 - rules: - id: error-shadow-check-types patterns: diff --git a/tools/semgrep/ci/fmt-printf.yml b/tools/semgrep/ci/fmt-printf.yml index fc6e824446a00e..18777cabeffc41 100644 --- a/tools/semgrep/ci/fmt-printf.yml +++ b/tools/semgrep/ci/fmt-printf.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: fmt.Printf languages: [go] diff --git a/tools/semgrep/ci/hashsum.yml b/tools/semgrep/ci/hashsum.yml index 82765a12a262a8..47dfc02a98c404 100644 --- a/tools/semgrep/ci/hashsum.yml +++ b/tools/semgrep/ci/hashsum.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: hash-sum-without-write patterns: diff --git a/tools/semgrep/ci/hmac-bytes.yml b/tools/semgrep/ci/hmac-bytes.yml index d8da277064a2b3..629a8fe6c5b53a 100644 --- a/tools/semgrep/ci/hmac-bytes.yml +++ b/tools/semgrep/ci/hmac-bytes.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: use-hmac-equal patterns: diff --git a/tools/semgrep/ci/hmac-hash.yml b/tools/semgrep/ci/hmac-hash.yml index 2b03883c4a5122..625d271c18b414 100644 --- a/tools/semgrep/ci/hmac-hash.yml +++ b/tools/semgrep/ci/hmac-hash.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: hmac-needs-new patterns: diff --git a/tools/semgrep/ci/logger-format-string.yml b/tools/semgrep/ci/logger-format-string.yml index 14cb6cd4276cad..bb1b83e209bc39 100644 --- a/tools/semgrep/ci/logger-format-string.yml +++ b/tools/semgrep/ci/logger-format-string.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: logger-used-with-format-string patterns: diff --git a/tools/semgrep/ci/loop-time-after.yml b/tools/semgrep/ci/loop-time-after.yml index e3a5183a1fd69f..08586bb6b60c17 100644 --- a/tools/semgrep/ci/loop-time-after.yml +++ b/tools/semgrep/ci/loop-time-after.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: loop-time-after pattern: | diff --git a/tools/semgrep/ci/loopclosure.yml b/tools/semgrep/ci/loopclosure.yml index 88ab134c5dff3e..967376127db864 100644 --- a/tools/semgrep/ci/loopclosure.yml +++ b/tools/semgrep/ci/loopclosure.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: loopclosure patterns: diff --git a/tools/semgrep/ci/no-nil-check.yml b/tools/semgrep/ci/no-nil-check.yml index 0b1f1ce3720513..c39bbb5420f851 100644 --- a/tools/semgrep/ci/no-nil-check.yml +++ b/tools/semgrep/ci/no-nil-check.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: nil-check-logical-storage patterns: diff --git a/tools/semgrep/ci/oddifsequence.yml b/tools/semgrep/ci/oddifsequence.yml index 77b71b6a2a635f..bee36d06fad235 100644 --- a/tools/semgrep/ci/oddifsequence.yml +++ b/tools/semgrep/ci/oddifsequence.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: odd-sequence-ifs patterns: diff --git a/tools/semgrep/ci/return-nil-error.yml b/tools/semgrep/ci/return-nil-error.yml index a91e4eaecd6d9d..0b6f7f677c6533 100644 --- a/tools/semgrep/ci/return-nil-error.yml +++ b/tools/semgrep/ci/return-nil-error.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
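The use-hmac-equal rule above exists because MAC tags should be compared with the constant-time hmac.Equal rather than bytes.Equal or ==. A brief sketch of the intended pattern; verifyMAC is an illustrative helper, not a Vault function.

    package main

    import (
        "crypto/hmac"
        "crypto/sha256"
        "fmt"
    )

    // verifyMAC recomputes the HMAC for message and compares it with hmac.Equal,
    // which is constant time and so does not leak how many leading bytes matched,
    // unlike bytes.Equal.
    func verifyMAC(message, received, key []byte) bool {
        mac := hmac.New(sha256.New, key)
        mac.Write(message) // hash.Hash.Write never returns an error
        expected := mac.Sum(nil)
        return hmac.Equal(expected, received)
    }

    func main() {
        key := []byte("demo-key")
        mac := hmac.New(sha256.New, key)
        mac.Write([]byte("payload"))
        tag := mac.Sum(nil)

        fmt.Println(verifyMAC([]byte("payload"), tag, key))  // true
        fmt.Println(verifyMAC([]byte("tampered"), tag, key)) // false
    }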
-# SPDX-License-Identifier: MPL-2.0 - rules: - id: return-nil patterns: diff --git a/tools/semgrep/ci/return-nil.yml b/tools/semgrep/ci/return-nil.yml index 2a6447cef7103e..18910c4a5fbb84 100644 --- a/tools/semgrep/ci/return-nil.yml +++ b/tools/semgrep/ci/return-nil.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: hc-return-nil patterns: diff --git a/tools/semgrep/ci/wrongerrcall.yml b/tools/semgrep/ci/wrongerrcall.yml index 315e26d5f2d08e..5a1a4d37af8d8c 100644 --- a/tools/semgrep/ci/wrongerrcall.yml +++ b/tools/semgrep/ci/wrongerrcall.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: maybe-wrong-err patterns: diff --git a/tools/semgrep/ci/wronglock.yml b/tools/semgrep/ci/wronglock.yml index 126a5446a4fd55..5f8422ce46cada 100644 --- a/tools/semgrep/ci/wronglock.yml +++ b/tools/semgrep/ci/wronglock.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: wrong-lock-unlock patterns: diff --git a/tools/semgrep/hostport.yml b/tools/semgrep/hostport.yml index 28613ecd903641..c47510e8814b20 100644 --- a/tools/semgrep/hostport.yml +++ b/tools/semgrep/hostport.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - # https://github.com/golang/go/issues/28308, from @stapelberg rules: - id: sprintf-host-port diff --git a/tools/semgrep/joinpath.yml b/tools/semgrep/joinpath.yml index ec27127bfa0301..173aba0ab0a0e4 100644 --- a/tools/semgrep/joinpath.yml +++ b/tools/semgrep/joinpath.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: use-strings-join-path patterns: diff --git a/tools/semgrep/lock-not-unlocked-on-return.yml b/tools/semgrep/lock-not-unlocked-on-return.yml index 6482b719459920..2d097f1aa0ff7a 100644 --- a/tools/semgrep/lock-not-unlocked-on-return.yml +++ b/tools/semgrep/lock-not-unlocked-on-return.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: lock_not_unlocked message: | diff --git a/tools/semgrep/logger-sprintf.yml b/tools/semgrep/logger-sprintf.yml index 7d2f48bcdc3ec3..3f58ba18ba8f7b 100644 --- a/tools/semgrep/logger-sprintf.yml +++ b/tools/semgrep/logger-sprintf.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: logger-used-with-sprintf patterns: diff --git a/tools/semgrep/paths-with-callbacks-and-operations.yml b/tools/semgrep/paths-with-callbacks-and-operations.yml index e29cbab65bafde..08e9c1ec1e3963 100644 --- a/tools/semgrep/paths-with-callbacks-and-operations.yml +++ b/tools/semgrep/paths-with-callbacks-and-operations.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: path-has-both-callbacks-and-operations patterns: diff --git a/tools/semgrep/paths-with-callbacks.yml b/tools/semgrep/paths-with-callbacks.yml index 9049a1d370a33c..3a122cc6d9e4a4 100644 --- a/tools/semgrep/paths-with-callbacks.yml +++ b/tools/semgrep/paths-with-callbacks.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: uses-path-callbacks patterns: diff --git a/tools/semgrep/physical-storage.yml b/tools/semgrep/physical-storage.yml index e7e978cc75a9d5..970c77693ed365 100644 --- a/tools/semgrep/physical-storage.yml +++ b/tools/semgrep/physical-storage.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. 
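The sprintf-host-port rule above (per golang/go#28308, cited in the file) flags building "host:port" strings with fmt.Sprintf, because IPv6 literals need surrounding brackets; net.JoinHostPort adds them. A quick sketch of the difference:

    package main

    import (
        "fmt"
        "net"
    )

    func main() {
        host, port := "2001:db8::1", "8200"

        // Sprintf-style concatenation is ambiguous for IPv6 literals and
        // rejected by dialers.
        fmt.Printf("%s:%s\n", host, port) // 2001:db8::1:8200

        // net.JoinHostPort adds the brackets IPv6 requires.
        fmt.Println(net.JoinHostPort(host, port)) // [2001:db8::1]:8200
    }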
-# SPDX-License-Identifier: MPL-2.0 - rules: - id: physical-storage-bypass-encryption patterns: diff --git a/tools/semgrep/replication-has-state.yml b/tools/semgrep/replication-has-state.yml index 7868e328087e52..416a59e6af6a2c 100644 --- a/tools/semgrep/replication-has-state.yml +++ b/tools/semgrep/replication-has-state.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: replication-state-should-use-IsPerfSecondary patterns: diff --git a/tools/semgrep/self-equals.yml b/tools/semgrep/self-equals.yml index ae7c1ff8c93c78..7cc5243f2b0753 100644 --- a/tools/semgrep/self-equals.yml +++ b/tools/semgrep/self-equals.yml @@ -1,6 +1,3 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - rules: - id: self-equals patterns: diff --git a/tools/tools.go b/tools/tools.go index a3f743244ffadc..5f81033df61cfd 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -1,6 +1,3 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - //go:build tools // This file ensures tool dependencies are kept in sync. This is the @@ -19,7 +16,6 @@ package tools //go:generate go install google.golang.org/protobuf/cmd/protoc-gen-go //go:generate go install google.golang.org/grpc/cmd/protoc-gen-go-grpc //go:generate go install github.com/favadi/protoc-go-inject-tag -//go:generate go install honnef.co/go/tools/cmd/staticcheck //go:generate go install github.com/golangci/revgrep/cmd/revgrep //go:generate go install gotest.tools/gotestsum import ( @@ -38,6 +34,4 @@ import ( _ "github.com/golangci/revgrep/cmd/revgrep" _ "gotest.tools/gotestsum" - - _ "honnef.co/go/tools/cmd/staticcheck" ) diff --git a/ui/.eslintrc.js b/ui/.eslintrc.js index bb4548319848ed..79b54b4eed05a1 100644 --- a/ui/.eslintrc.js +++ b/ui/.eslintrc.js @@ -1,8 +1,3 @@ -/** - * Copyright (c) HashiCorp, Inc. - * SPDX-License-Identifier: MPL-2.0 - */ - /* eslint-disable no-undef */ 'use strict'; diff --git a/ui/.github/workflows/ci.yml b/ui/.github/workflows/ci.yml new file mode 100644 index 00000000000000..6287d32644de61 --- /dev/null +++ b/ui/.github/workflows/ci.yml @@ -0,0 +1,45 @@ +name: CI + +on: + push: + branches: + - main + - master + pull_request: {} + +concurrency: + group: ci-${{ github.head_ref || github.ref }} + cancel-in-progress: true + +jobs: + lint: + name: "Lint" + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Install Node + uses: actions/setup-node@v3 + with: + node-version: 12.x + cache: yarn + - name: Install Dependencies + run: yarn install --frozen-lockfile + - name: Lint + run: yarn lint + + test: + name: "Test" + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Install Node + uses: actions/setup-node@v3 + with: + node-version: 12.x + cache: yarn + - name: Install Dependencies + run: yarn install --frozen-lockfile + - name: Run Tests + run: yarn test diff --git a/ui/.gitignore b/ui/.gitignore index f51263f3dd9b37..70da8c051de344 100644 --- a/ui/.gitignore +++ b/ui/.gitignore @@ -29,12 +29,3 @@ package-lock.json # broccoli-debug /DEBUG/ - -# yarn -.pnp.* -.yarn/* -!.yarn/patches -!.yarn/plugins -!.yarn/releases -!.yarn/sdks -!.yarn/versions diff --git a/ui/.nvmrc b/ui/.nvmrc new file mode 100644 index 00000000000000..958b5a36e1fad0 --- /dev/null +++ b/ui/.nvmrc @@ -0,0 +1 @@ +v14 diff --git a/ui/.prettierrc.js b/ui/.prettierrc.js index 8c776351a4545f..8f507fd9b33d0c 100644 --- a/ui/.prettierrc.js +++ b/ui/.prettierrc.js @@ -1,8 +1,3 @@ -/** - * Copyright (c) HashiCorp, Inc. 
- * SPDX-License-Identifier: MPL-2.0 - */ - 'use strict'; module.exports = { diff --git a/ui/.template-lintrc.js b/ui/.template-lintrc.js index 3540521890a8c6..e4e51b09aa4ba4 100644 --- a/ui/.template-lintrc.js +++ b/ui/.template-lintrc.js @@ -1,8 +1,3 @@ -/** - * Copyright (c) HashiCorp, Inc. - * SPDX-License-Identifier: MPL-2.0 - */ - 'use strict'; const fs = require('fs'); diff --git a/ui/.yarn/releases/yarn-1.19.1.js b/ui/.yarn/releases/yarn-1.19.1.js new file mode 100755 index 00000000000000..3907b87325d006 --- /dev/null +++ b/ui/.yarn/releases/yarn-1.19.1.js @@ -0,0 +1,147216 @@ +#!/usr/bin/env node +module.exports = +/******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // identity function for calling harmony imports with the correct context +/******/ __webpack_require__.i = function(value) { return value; }; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, { +/******/ configurable: false, +/******/ enumerable: true, +/******/ get: getter +/******/ }); +/******/ } +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? 
+/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 549); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports) { + +module.exports = require("path"); + +/***/ }), +/* 1 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (immutable) */ __webpack_exports__["a"] = __extends; +/* unused harmony export __assign */ +/* unused harmony export __rest */ +/* unused harmony export __decorate */ +/* unused harmony export __param */ +/* unused harmony export __metadata */ +/* unused harmony export __awaiter */ +/* unused harmony export __generator */ +/* unused harmony export __exportStar */ +/* unused harmony export __values */ +/* unused harmony export __read */ +/* unused harmony export __spread */ +/* unused harmony export __await */ +/* unused harmony export __asyncGenerator */ +/* unused harmony export __asyncDelegator */ +/* unused harmony export __asyncValues */ +/* unused harmony export __makeTemplateObject */ +/* unused harmony export __importStar */ +/* unused harmony export __importDefault */ +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 + +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. + +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); +} + +var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) if (e.indexOf(p[i]) < 0) + t[p[i]] = s[p[i]]; + return t; +} + +function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +function __awaiter(thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +} + +function __exportStar(m, exports) { + for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; +} + +function __values(o) { + var m = typeof Symbol === "function" && o[Symbol.iterator], i = 0; + if (m) return m.call(o); + return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; +} + +function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result.default = mod; + return result; +} + +function __importDefault(mod) { + return (mod && mod.__esModule) ? 
mod : { default: mod }; +} + + +/***/ }), +/* 2 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +exports.__esModule = true; + +var _promise = __webpack_require__(227); + +var _promise2 = _interopRequireDefault(_promise); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +exports.default = function (fn) { + return function () { + var gen = fn.apply(this, arguments); + return new _promise2.default(function (resolve, reject) { + function step(key, arg) { + try { + var info = gen[key](arg); + var value = info.value; + } catch (error) { + reject(error); + return; + } + + if (info.done) { + resolve(value); + } else { + return _promise2.default.resolve(value).then(function (value) { + step("next", value); + }, function (err) { + step("throw", err); + }); + } + } + + return step("next"); + }); + }; +}; + +/***/ }), +/* 3 */ +/***/ (function(module, exports) { + +module.exports = require("util"); + +/***/ }), +/* 4 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.getFirstSuitableFolder = exports.readFirstAvailableStream = exports.makeTempDir = exports.hardlinksWork = exports.writeFilePreservingEol = exports.getFileSizeOnDisk = exports.walk = exports.symlink = exports.find = exports.readJsonAndFile = exports.readJson = exports.readFileAny = exports.hardlinkBulk = exports.copyBulk = exports.unlink = exports.glob = exports.link = exports.chmod = exports.lstat = exports.exists = exports.mkdirp = exports.stat = exports.access = exports.rename = exports.readdir = exports.realpath = exports.readlink = exports.writeFile = exports.open = exports.readFileBuffer = exports.lockQueue = exports.constants = undefined; + +var _asyncToGenerator2; + +function _load_asyncToGenerator() { + return _asyncToGenerator2 = _interopRequireDefault(__webpack_require__(2)); +} + +let buildActionsForCopy = (() => { + var _ref = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (queue, events, possibleExtraneous, reporter) { + + // + let build = (() => { + var _ref5 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (data) { + const src = data.src, + dest = data.dest, + type = data.type; + + const onFresh = data.onFresh || noop; + const onDone = data.onDone || noop; + + // TODO https://github.com/yarnpkg/yarn/issues/3751 + // related to bundled dependencies handling + if (files.has(dest.toLowerCase())) { + reporter.verbose(`The case-insensitive file ${dest} shouldn't be copied twice in one bulk copy`); + } else { + files.add(dest.toLowerCase()); + } + + if (type === 'symlink') { + yield mkdirp((_path || _load_path()).default.dirname(dest)); + onFresh(); + actions.symlink.push({ + dest, + linkname: src + }); + onDone(); + return; + } + + if (events.ignoreBasenames.indexOf((_path || _load_path()).default.basename(src)) >= 0) { + // ignored file + return; + } + + const srcStat = yield lstat(src); + let srcFiles; + + if (srcStat.isDirectory()) { + srcFiles = yield readdir(src); + } + + let destStat; + try { + // try accessing the destination + destStat = yield lstat(dest); + } catch (e) { + // proceed if destination doesn't exist, otherwise error + if (e.code !== 'ENOENT') { + throw e; + } + } + + // if destination exists + if (destStat) { + const bothSymlinks = srcStat.isSymbolicLink() && destStat.isSymbolicLink(); + const bothFolders = srcStat.isDirectory() && destStat.isDirectory(); + const 
bothFiles = srcStat.isFile() && destStat.isFile(); + + // EINVAL access errors sometimes happen which shouldn't because node shouldn't be giving + // us modes that aren't valid. investigate this, it's generally safe to proceed. + + /* if (srcStat.mode !== destStat.mode) { + try { + await access(dest, srcStat.mode); + } catch (err) {} + } */ + + if (bothFiles && artifactFiles.has(dest)) { + // this file gets changed during build, likely by a custom install script. Don't bother checking it. + onDone(); + reporter.verbose(reporter.lang('verboseFileSkipArtifact', src)); + return; + } + + if (bothFiles && srcStat.size === destStat.size && (0, (_fsNormalized || _load_fsNormalized()).fileDatesEqual)(srcStat.mtime, destStat.mtime)) { + // we can safely assume this is the same file + onDone(); + reporter.verbose(reporter.lang('verboseFileSkip', src, dest, srcStat.size, +srcStat.mtime)); + return; + } + + if (bothSymlinks) { + const srcReallink = yield readlink(src); + if (srcReallink === (yield readlink(dest))) { + // if both symlinks are the same then we can continue on + onDone(); + reporter.verbose(reporter.lang('verboseFileSkipSymlink', src, dest, srcReallink)); + return; + } + } + + if (bothFolders) { + // mark files that aren't in this folder as possibly extraneous + const destFiles = yield readdir(dest); + invariant(srcFiles, 'src files not initialised'); + + for (var _iterator4 = destFiles, _isArray4 = Array.isArray(_iterator4), _i4 = 0, _iterator4 = _isArray4 ? _iterator4 : _iterator4[Symbol.iterator]();;) { + var _ref6; + + if (_isArray4) { + if (_i4 >= _iterator4.length) break; + _ref6 = _iterator4[_i4++]; + } else { + _i4 = _iterator4.next(); + if (_i4.done) break; + _ref6 = _i4.value; + } + + const file = _ref6; + + if (srcFiles.indexOf(file) < 0) { + const loc = (_path || _load_path()).default.join(dest, file); + possibleExtraneous.add(loc); + + if ((yield lstat(loc)).isDirectory()) { + for (var _iterator5 = yield readdir(loc), _isArray5 = Array.isArray(_iterator5), _i5 = 0, _iterator5 = _isArray5 ? _iterator5 : _iterator5[Symbol.iterator]();;) { + var _ref7; + + if (_isArray5) { + if (_i5 >= _iterator5.length) break; + _ref7 = _iterator5[_i5++]; + } else { + _i5 = _iterator5.next(); + if (_i5.done) break; + _ref7 = _i5.value; + } + + const file = _ref7; + + possibleExtraneous.add((_path || _load_path()).default.join(loc, file)); + } + } + } + } + } + } + + if (destStat && destStat.isSymbolicLink()) { + yield (0, (_fsNormalized || _load_fsNormalized()).unlink)(dest); + destStat = null; + } + + if (srcStat.isSymbolicLink()) { + onFresh(); + const linkname = yield readlink(src); + actions.symlink.push({ + dest, + linkname + }); + onDone(); + } else if (srcStat.isDirectory()) { + if (!destStat) { + reporter.verbose(reporter.lang('verboseFileFolder', dest)); + yield mkdirp(dest); + } + + const destParts = dest.split((_path || _load_path()).default.sep); + while (destParts.length) { + files.add(destParts.join((_path || _load_path()).default.sep).toLowerCase()); + destParts.pop(); + } + + // push all files to queue + invariant(srcFiles, 'src files not initialised'); + let remaining = srcFiles.length; + if (!remaining) { + onDone(); + } + for (var _iterator6 = srcFiles, _isArray6 = Array.isArray(_iterator6), _i6 = 0, _iterator6 = _isArray6 ? 
_iterator6 : _iterator6[Symbol.iterator]();;) { + var _ref8; + + if (_isArray6) { + if (_i6 >= _iterator6.length) break; + _ref8 = _iterator6[_i6++]; + } else { + _i6 = _iterator6.next(); + if (_i6.done) break; + _ref8 = _i6.value; + } + + const file = _ref8; + + queue.push({ + dest: (_path || _load_path()).default.join(dest, file), + onFresh, + onDone: function (_onDone) { + function onDone() { + return _onDone.apply(this, arguments); + } + + onDone.toString = function () { + return _onDone.toString(); + }; + + return onDone; + }(function () { + if (--remaining === 0) { + onDone(); + } + }), + src: (_path || _load_path()).default.join(src, file) + }); + } + } else if (srcStat.isFile()) { + onFresh(); + actions.file.push({ + src, + dest, + atime: srcStat.atime, + mtime: srcStat.mtime, + mode: srcStat.mode + }); + onDone(); + } else { + throw new Error(`unsure how to copy this: ${src}`); + } + }); + + return function build(_x5) { + return _ref5.apply(this, arguments); + }; + })(); + + const artifactFiles = new Set(events.artifactFiles || []); + const files = new Set(); + + // initialise events + for (var _iterator = queue, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? _iterator : _iterator[Symbol.iterator]();;) { + var _ref2; + + if (_isArray) { + if (_i >= _iterator.length) break; + _ref2 = _iterator[_i++]; + } else { + _i = _iterator.next(); + if (_i.done) break; + _ref2 = _i.value; + } + + const item = _ref2; + + const onDone = item.onDone; + item.onDone = function () { + events.onProgress(item.dest); + if (onDone) { + onDone(); + } + }; + } + events.onStart(queue.length); + + // start building actions + const actions = { + file: [], + symlink: [], + link: [] + }; + + // custom concurrency logic as we're always executing stacks of CONCURRENT_QUEUE_ITEMS queue items + // at a time due to the requirement to push items onto the queue + while (queue.length) { + const items = queue.splice(0, CONCURRENT_QUEUE_ITEMS); + yield Promise.all(items.map(build)); + } + + // simulate the existence of some files to prevent considering them extraneous + for (var _iterator2 = artifactFiles, _isArray2 = Array.isArray(_iterator2), _i2 = 0, _iterator2 = _isArray2 ? _iterator2 : _iterator2[Symbol.iterator]();;) { + var _ref3; + + if (_isArray2) { + if (_i2 >= _iterator2.length) break; + _ref3 = _iterator2[_i2++]; + } else { + _i2 = _iterator2.next(); + if (_i2.done) break; + _ref3 = _i2.value; + } + + const file = _ref3; + + if (possibleExtraneous.has(file)) { + reporter.verbose(reporter.lang('verboseFilePhantomExtraneous', file)); + possibleExtraneous.delete(file); + } + } + + for (var _iterator3 = possibleExtraneous, _isArray3 = Array.isArray(_iterator3), _i3 = 0, _iterator3 = _isArray3 ? 
_iterator3 : _iterator3[Symbol.iterator]();;) { + var _ref4; + + if (_isArray3) { + if (_i3 >= _iterator3.length) break; + _ref4 = _iterator3[_i3++]; + } else { + _i3 = _iterator3.next(); + if (_i3.done) break; + _ref4 = _i3.value; + } + + const loc = _ref4; + + if (files.has(loc.toLowerCase())) { + possibleExtraneous.delete(loc); + } + } + + return actions; + }); + + return function buildActionsForCopy(_x, _x2, _x3, _x4) { + return _ref.apply(this, arguments); + }; +})(); + +let buildActionsForHardlink = (() => { + var _ref9 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (queue, events, possibleExtraneous, reporter) { + + // + let build = (() => { + var _ref13 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (data) { + const src = data.src, + dest = data.dest; + + const onFresh = data.onFresh || noop; + const onDone = data.onDone || noop; + if (files.has(dest.toLowerCase())) { + // Fixes issue https://github.com/yarnpkg/yarn/issues/2734 + // When bulk hardlinking we have A -> B structure that we want to hardlink to A1 -> B1, + // package-linker passes that modules A1 and B1 need to be hardlinked, + // the recursive linking algorithm of A1 ends up scheduling files in B1 to be linked twice which will case + // an exception. + onDone(); + return; + } + files.add(dest.toLowerCase()); + + if (events.ignoreBasenames.indexOf((_path || _load_path()).default.basename(src)) >= 0) { + // ignored file + return; + } + + const srcStat = yield lstat(src); + let srcFiles; + + if (srcStat.isDirectory()) { + srcFiles = yield readdir(src); + } + + const destExists = yield exists(dest); + if (destExists) { + const destStat = yield lstat(dest); + + const bothSymlinks = srcStat.isSymbolicLink() && destStat.isSymbolicLink(); + const bothFolders = srcStat.isDirectory() && destStat.isDirectory(); + const bothFiles = srcStat.isFile() && destStat.isFile(); + + if (srcStat.mode !== destStat.mode) { + try { + yield access(dest, srcStat.mode); + } catch (err) { + // EINVAL access errors sometimes happen which shouldn't because node shouldn't be giving + // us modes that aren't valid. investigate this, it's generally safe to proceed. + reporter.verbose(err); + } + } + + if (bothFiles && artifactFiles.has(dest)) { + // this file gets changed during build, likely by a custom install script. Don't bother checking it. + onDone(); + reporter.verbose(reporter.lang('verboseFileSkipArtifact', src)); + return; + } + + // correct hardlink + if (bothFiles && srcStat.ino !== null && srcStat.ino === destStat.ino) { + onDone(); + reporter.verbose(reporter.lang('verboseFileSkip', src, dest, srcStat.ino)); + return; + } + + if (bothSymlinks) { + const srcReallink = yield readlink(src); + if (srcReallink === (yield readlink(dest))) { + // if both symlinks are the same then we can continue on + onDone(); + reporter.verbose(reporter.lang('verboseFileSkipSymlink', src, dest, srcReallink)); + return; + } + } + + if (bothFolders) { + // mark files that aren't in this folder as possibly extraneous + const destFiles = yield readdir(dest); + invariant(srcFiles, 'src files not initialised'); + + for (var _iterator10 = destFiles, _isArray10 = Array.isArray(_iterator10), _i10 = 0, _iterator10 = _isArray10 ? 
_iterator10 : _iterator10[Symbol.iterator]();;) { + var _ref14; + + if (_isArray10) { + if (_i10 >= _iterator10.length) break; + _ref14 = _iterator10[_i10++]; + } else { + _i10 = _iterator10.next(); + if (_i10.done) break; + _ref14 = _i10.value; + } + + const file = _ref14; + + if (srcFiles.indexOf(file) < 0) { + const loc = (_path || _load_path()).default.join(dest, file); + possibleExtraneous.add(loc); + + if ((yield lstat(loc)).isDirectory()) { + for (var _iterator11 = yield readdir(loc), _isArray11 = Array.isArray(_iterator11), _i11 = 0, _iterator11 = _isArray11 ? _iterator11 : _iterator11[Symbol.iterator]();;) { + var _ref15; + + if (_isArray11) { + if (_i11 >= _iterator11.length) break; + _ref15 = _iterator11[_i11++]; + } else { + _i11 = _iterator11.next(); + if (_i11.done) break; + _ref15 = _i11.value; + } + + const file = _ref15; + + possibleExtraneous.add((_path || _load_path()).default.join(loc, file)); + } + } + } + } + } + } + + if (srcStat.isSymbolicLink()) { + onFresh(); + const linkname = yield readlink(src); + actions.symlink.push({ + dest, + linkname + }); + onDone(); + } else if (srcStat.isDirectory()) { + reporter.verbose(reporter.lang('verboseFileFolder', dest)); + yield mkdirp(dest); + + const destParts = dest.split((_path || _load_path()).default.sep); + while (destParts.length) { + files.add(destParts.join((_path || _load_path()).default.sep).toLowerCase()); + destParts.pop(); + } + + // push all files to queue + invariant(srcFiles, 'src files not initialised'); + let remaining = srcFiles.length; + if (!remaining) { + onDone(); + } + for (var _iterator12 = srcFiles, _isArray12 = Array.isArray(_iterator12), _i12 = 0, _iterator12 = _isArray12 ? _iterator12 : _iterator12[Symbol.iterator]();;) { + var _ref16; + + if (_isArray12) { + if (_i12 >= _iterator12.length) break; + _ref16 = _iterator12[_i12++]; + } else { + _i12 = _iterator12.next(); + if (_i12.done) break; + _ref16 = _i12.value; + } + + const file = _ref16; + + queue.push({ + onFresh, + src: (_path || _load_path()).default.join(src, file), + dest: (_path || _load_path()).default.join(dest, file), + onDone: function (_onDone2) { + function onDone() { + return _onDone2.apply(this, arguments); + } + + onDone.toString = function () { + return _onDone2.toString(); + }; + + return onDone; + }(function () { + if (--remaining === 0) { + onDone(); + } + }) + }); + } + } else if (srcStat.isFile()) { + onFresh(); + actions.link.push({ + src, + dest, + removeDest: destExists + }); + onDone(); + } else { + throw new Error(`unsure how to copy this: ${src}`); + } + }); + + return function build(_x10) { + return _ref13.apply(this, arguments); + }; + })(); + + const artifactFiles = new Set(events.artifactFiles || []); + const files = new Set(); + + // initialise events + for (var _iterator7 = queue, _isArray7 = Array.isArray(_iterator7), _i7 = 0, _iterator7 = _isArray7 ? 
_iterator7 : _iterator7[Symbol.iterator]();;) { + var _ref10; + + if (_isArray7) { + if (_i7 >= _iterator7.length) break; + _ref10 = _iterator7[_i7++]; + } else { + _i7 = _iterator7.next(); + if (_i7.done) break; + _ref10 = _i7.value; + } + + const item = _ref10; + + const onDone = item.onDone || noop; + item.onDone = function () { + events.onProgress(item.dest); + onDone(); + }; + } + events.onStart(queue.length); + + // start building actions + const actions = { + file: [], + symlink: [], + link: [] + }; + + // custom concurrency logic as we're always executing stacks of CONCURRENT_QUEUE_ITEMS queue items + // at a time due to the requirement to push items onto the queue + while (queue.length) { + const items = queue.splice(0, CONCURRENT_QUEUE_ITEMS); + yield Promise.all(items.map(build)); + } + + // simulate the existence of some files to prevent considering them extraneous + for (var _iterator8 = artifactFiles, _isArray8 = Array.isArray(_iterator8), _i8 = 0, _iterator8 = _isArray8 ? _iterator8 : _iterator8[Symbol.iterator]();;) { + var _ref11; + + if (_isArray8) { + if (_i8 >= _iterator8.length) break; + _ref11 = _iterator8[_i8++]; + } else { + _i8 = _iterator8.next(); + if (_i8.done) break; + _ref11 = _i8.value; + } + + const file = _ref11; + + if (possibleExtraneous.has(file)) { + reporter.verbose(reporter.lang('verboseFilePhantomExtraneous', file)); + possibleExtraneous.delete(file); + } + } + + for (var _iterator9 = possibleExtraneous, _isArray9 = Array.isArray(_iterator9), _i9 = 0, _iterator9 = _isArray9 ? _iterator9 : _iterator9[Symbol.iterator]();;) { + var _ref12; + + if (_isArray9) { + if (_i9 >= _iterator9.length) break; + _ref12 = _iterator9[_i9++]; + } else { + _i9 = _iterator9.next(); + if (_i9.done) break; + _ref12 = _i9.value; + } + + const loc = _ref12; + + if (files.has(loc.toLowerCase())) { + possibleExtraneous.delete(loc); + } + } + + return actions; + }); + + return function buildActionsForHardlink(_x6, _x7, _x8, _x9) { + return _ref9.apply(this, arguments); + }; +})(); + +let copyBulk = exports.copyBulk = (() => { + var _ref17 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (queue, reporter, _events) { + const events = { + onStart: _events && _events.onStart || noop, + onProgress: _events && _events.onProgress || noop, + possibleExtraneous: _events ? 
_events.possibleExtraneous : new Set(), + ignoreBasenames: _events && _events.ignoreBasenames || [], + artifactFiles: _events && _events.artifactFiles || [] + }; + + const actions = yield buildActionsForCopy(queue, events, events.possibleExtraneous, reporter); + events.onStart(actions.file.length + actions.symlink.length + actions.link.length); + + const fileActions = actions.file; + + const currentlyWriting = new Map(); + + yield (_promise || _load_promise()).queue(fileActions, (() => { + var _ref18 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (data) { + let writePromise; + while (writePromise = currentlyWriting.get(data.dest)) { + yield writePromise; + } + + reporter.verbose(reporter.lang('verboseFileCopy', data.src, data.dest)); + const copier = (0, (_fsNormalized || _load_fsNormalized()).copyFile)(data, function () { + return currentlyWriting.delete(data.dest); + }); + currentlyWriting.set(data.dest, copier); + events.onProgress(data.dest); + return copier; + }); + + return function (_x14) { + return _ref18.apply(this, arguments); + }; + })(), CONCURRENT_QUEUE_ITEMS); + + // we need to copy symlinks last as they could reference files we were copying + const symlinkActions = actions.symlink; + yield (_promise || _load_promise()).queue(symlinkActions, function (data) { + const linkname = (_path || _load_path()).default.resolve((_path || _load_path()).default.dirname(data.dest), data.linkname); + reporter.verbose(reporter.lang('verboseFileSymlink', data.dest, linkname)); + return symlink(linkname, data.dest); + }); + }); + + return function copyBulk(_x11, _x12, _x13) { + return _ref17.apply(this, arguments); + }; +})(); + +let hardlinkBulk = exports.hardlinkBulk = (() => { + var _ref19 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (queue, reporter, _events) { + const events = { + onStart: _events && _events.onStart || noop, + onProgress: _events && _events.onProgress || noop, + possibleExtraneous: _events ? 
_events.possibleExtraneous : new Set(), + artifactFiles: _events && _events.artifactFiles || [], + ignoreBasenames: [] + }; + + const actions = yield buildActionsForHardlink(queue, events, events.possibleExtraneous, reporter); + events.onStart(actions.file.length + actions.symlink.length + actions.link.length); + + const fileActions = actions.link; + + yield (_promise || _load_promise()).queue(fileActions, (() => { + var _ref20 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (data) { + reporter.verbose(reporter.lang('verboseFileLink', data.src, data.dest)); + if (data.removeDest) { + yield (0, (_fsNormalized || _load_fsNormalized()).unlink)(data.dest); + } + yield link(data.src, data.dest); + }); + + return function (_x18) { + return _ref20.apply(this, arguments); + }; + })(), CONCURRENT_QUEUE_ITEMS); + + // we need to copy symlinks last as they could reference files we were copying + const symlinkActions = actions.symlink; + yield (_promise || _load_promise()).queue(symlinkActions, function (data) { + const linkname = (_path || _load_path()).default.resolve((_path || _load_path()).default.dirname(data.dest), data.linkname); + reporter.verbose(reporter.lang('verboseFileSymlink', data.dest, linkname)); + return symlink(linkname, data.dest); + }); + }); + + return function hardlinkBulk(_x15, _x16, _x17) { + return _ref19.apply(this, arguments); + }; +})(); + +let readFileAny = exports.readFileAny = (() => { + var _ref21 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (files) { + for (var _iterator13 = files, _isArray13 = Array.isArray(_iterator13), _i13 = 0, _iterator13 = _isArray13 ? _iterator13 : _iterator13[Symbol.iterator]();;) { + var _ref22; + + if (_isArray13) { + if (_i13 >= _iterator13.length) break; + _ref22 = _iterator13[_i13++]; + } else { + _i13 = _iterator13.next(); + if (_i13.done) break; + _ref22 = _i13.value; + } + + const file = _ref22; + + if (yield exists(file)) { + return readFile(file); + } + } + return null; + }); + + return function readFileAny(_x19) { + return _ref21.apply(this, arguments); + }; +})(); + +let readJson = exports.readJson = (() => { + var _ref23 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) { + return (yield readJsonAndFile(loc)).object; + }); + + return function readJson(_x20) { + return _ref23.apply(this, arguments); + }; +})(); + +let readJsonAndFile = exports.readJsonAndFile = (() => { + var _ref24 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) { + const file = yield readFile(loc); + try { + return { + object: (0, (_map || _load_map()).default)(JSON.parse(stripBOM(file))), + content: file + }; + } catch (err) { + err.message = `${loc}: ${err.message}`; + throw err; + } + }); + + return function readJsonAndFile(_x21) { + return _ref24.apply(this, arguments); + }; +})(); + +let find = exports.find = (() => { + var _ref25 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (filename, dir) { + const parts = dir.split((_path || _load_path()).default.sep); + + while (parts.length) { + const loc = parts.concat(filename).join((_path || _load_path()).default.sep); + + if (yield exists(loc)) { + return loc; + } else { + parts.pop(); + } + } + + return false; + }); + + return function find(_x22, _x23) { + return _ref25.apply(this, arguments); + }; +})(); + +let symlink = exports.symlink = (() => { + var _ref26 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (src, dest) { + if (process.platform 
!== 'win32') { + // use relative paths otherwise which will be retained if the directory is moved + src = (_path || _load_path()).default.relative((_path || _load_path()).default.dirname(dest), src); + // When path.relative returns an empty string for the current directory, we should instead use + // '.', which is a valid fs.symlink target. + src = src || '.'; + } + + try { + const stats = yield lstat(dest); + if (stats.isSymbolicLink()) { + const resolved = dest; + if (resolved === src) { + return; + } + } + } catch (err) { + if (err.code !== 'ENOENT') { + throw err; + } + } + + // We use rimraf for unlink which never throws an ENOENT on missing target + yield (0, (_fsNormalized || _load_fsNormalized()).unlink)(dest); + + if (process.platform === 'win32') { + // use directory junctions if possible on win32, this requires absolute paths + yield fsSymlink(src, dest, 'junction'); + } else { + yield fsSymlink(src, dest); + } + }); + + return function symlink(_x24, _x25) { + return _ref26.apply(this, arguments); + }; +})(); + +let walk = exports.walk = (() => { + var _ref27 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (dir, relativeDir, ignoreBasenames = new Set()) { + let files = []; + + let filenames = yield readdir(dir); + if (ignoreBasenames.size) { + filenames = filenames.filter(function (name) { + return !ignoreBasenames.has(name); + }); + } + + for (var _iterator14 = filenames, _isArray14 = Array.isArray(_iterator14), _i14 = 0, _iterator14 = _isArray14 ? _iterator14 : _iterator14[Symbol.iterator]();;) { + var _ref28; + + if (_isArray14) { + if (_i14 >= _iterator14.length) break; + _ref28 = _iterator14[_i14++]; + } else { + _i14 = _iterator14.next(); + if (_i14.done) break; + _ref28 = _i14.value; + } + + const name = _ref28; + + const relative = relativeDir ? 
(_path || _load_path()).default.join(relativeDir, name) : name; + const loc = (_path || _load_path()).default.join(dir, name); + const stat = yield lstat(loc); + + files.push({ + relative, + basename: name, + absolute: loc, + mtime: +stat.mtime + }); + + if (stat.isDirectory()) { + files = files.concat((yield walk(loc, relative, ignoreBasenames))); + } + } + + return files; + }); + + return function walk(_x26, _x27) { + return _ref27.apply(this, arguments); + }; +})(); + +let getFileSizeOnDisk = exports.getFileSizeOnDisk = (() => { + var _ref29 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) { + const stat = yield lstat(loc); + const size = stat.size, + blockSize = stat.blksize; + + + return Math.ceil(size / blockSize) * blockSize; + }); + + return function getFileSizeOnDisk(_x28) { + return _ref29.apply(this, arguments); + }; +})(); + +let getEolFromFile = (() => { + var _ref30 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (path) { + if (!(yield exists(path))) { + return undefined; + } + + const buffer = yield readFileBuffer(path); + + for (let i = 0; i < buffer.length; ++i) { + if (buffer[i] === cr) { + return '\r\n'; + } + if (buffer[i] === lf) { + return '\n'; + } + } + return undefined; + }); + + return function getEolFromFile(_x29) { + return _ref30.apply(this, arguments); + }; +})(); + +let writeFilePreservingEol = exports.writeFilePreservingEol = (() => { + var _ref31 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (path, data) { + const eol = (yield getEolFromFile(path)) || (_os || _load_os()).default.EOL; + if (eol !== '\n') { + data = data.replace(/\n/g, eol); + } + yield writeFile(path, data); + }); + + return function writeFilePreservingEol(_x30, _x31) { + return _ref31.apply(this, arguments); + }; +})(); + +let hardlinksWork = exports.hardlinksWork = (() => { + var _ref32 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (dir) { + const filename = 'test-file' + Math.random(); + const file = (_path || _load_path()).default.join(dir, filename); + const fileLink = (_path || _load_path()).default.join(dir, filename + '-link'); + try { + yield writeFile(file, 'test'); + yield link(file, fileLink); + } catch (err) { + return false; + } finally { + yield (0, (_fsNormalized || _load_fsNormalized()).unlink)(file); + yield (0, (_fsNormalized || _load_fsNormalized()).unlink)(fileLink); + } + return true; + }); + + return function hardlinksWork(_x32) { + return _ref32.apply(this, arguments); + }; +})(); + +// not a strict polyfill for Node's fs.mkdtemp + + +let makeTempDir = exports.makeTempDir = (() => { + var _ref33 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (prefix) { + const dir = (_path || _load_path()).default.join((_os || _load_os()).default.tmpdir(), `yarn-${prefix || ''}-${Date.now()}-${Math.random()}`); + yield (0, (_fsNormalized || _load_fsNormalized()).unlink)(dir); + yield mkdirp(dir); + return dir; + }); + + return function makeTempDir(_x33) { + return _ref33.apply(this, arguments); + }; +})(); + +let readFirstAvailableStream = exports.readFirstAvailableStream = (() => { + var _ref34 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (paths) { + for (var _iterator15 = paths, _isArray15 = Array.isArray(_iterator15), _i15 = 0, _iterator15 = _isArray15 ? 
_iterator15 : _iterator15[Symbol.iterator]();;) { + var _ref35; + + if (_isArray15) { + if (_i15 >= _iterator15.length) break; + _ref35 = _iterator15[_i15++]; + } else { + _i15 = _iterator15.next(); + if (_i15.done) break; + _ref35 = _i15.value; + } + + const path = _ref35; + + try { + const fd = yield open(path, 'r'); + return (_fs || _load_fs()).default.createReadStream(path, { fd }); + } catch (err) { + // Try the next one + } + } + return null; + }); + + return function readFirstAvailableStream(_x34) { + return _ref34.apply(this, arguments); + }; +})(); + +let getFirstSuitableFolder = exports.getFirstSuitableFolder = (() => { + var _ref36 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (paths, mode = constants.W_OK | constants.X_OK) { + const result = { + skipped: [], + folder: null + }; + + for (var _iterator16 = paths, _isArray16 = Array.isArray(_iterator16), _i16 = 0, _iterator16 = _isArray16 ? _iterator16 : _iterator16[Symbol.iterator]();;) { + var _ref37; + + if (_isArray16) { + if (_i16 >= _iterator16.length) break; + _ref37 = _iterator16[_i16++]; + } else { + _i16 = _iterator16.next(); + if (_i16.done) break; + _ref37 = _i16.value; + } + + const folder = _ref37; + + try { + yield mkdirp(folder); + yield access(folder, mode); + + result.folder = folder; + + return result; + } catch (error) { + result.skipped.push({ + error, + folder + }); + } + } + return result; + }); + + return function getFirstSuitableFolder(_x35) { + return _ref36.apply(this, arguments); + }; +})(); + +exports.copy = copy; +exports.readFile = readFile; +exports.readFileRaw = readFileRaw; +exports.normalizeOS = normalizeOS; + +var _fs; + +function _load_fs() { + return _fs = _interopRequireDefault(__webpack_require__(5)); +} + +var _glob; + +function _load_glob() { + return _glob = _interopRequireDefault(__webpack_require__(99)); +} + +var _os; + +function _load_os() { + return _os = _interopRequireDefault(__webpack_require__(49)); +} + +var _path; + +function _load_path() { + return _path = _interopRequireDefault(__webpack_require__(0)); +} + +var _blockingQueue; + +function _load_blockingQueue() { + return _blockingQueue = _interopRequireDefault(__webpack_require__(110)); +} + +var _promise; + +function _load_promise() { + return _promise = _interopRequireWildcard(__webpack_require__(50)); +} + +var _promise2; + +function _load_promise2() { + return _promise2 = __webpack_require__(50); +} + +var _map; + +function _load_map() { + return _map = _interopRequireDefault(__webpack_require__(29)); +} + +var _fsNormalized; + +function _load_fsNormalized() { + return _fsNormalized = __webpack_require__(218); +} + +function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const constants = exports.constants = typeof (_fs || _load_fs()).default.constants !== 'undefined' ? 
(_fs || _load_fs()).default.constants : { + R_OK: (_fs || _load_fs()).default.R_OK, + W_OK: (_fs || _load_fs()).default.W_OK, + X_OK: (_fs || _load_fs()).default.X_OK +}; + +const lockQueue = exports.lockQueue = new (_blockingQueue || _load_blockingQueue()).default('fs lock'); + +const readFileBuffer = exports.readFileBuffer = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readFile); +const open = exports.open = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.open); +const writeFile = exports.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile); +const readlink = exports.readlink = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readlink); +const realpath = exports.realpath = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.realpath); +const readdir = exports.readdir = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readdir); +const rename = exports.rename = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.rename); +const access = exports.access = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.access); +const stat = exports.stat = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.stat); +const mkdirp = exports.mkdirp = (0, (_promise2 || _load_promise2()).promisify)(__webpack_require__(145)); +const exists = exports.exists = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.exists, true); +const lstat = exports.lstat = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.lstat); +const chmod = exports.chmod = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.chmod); +const link = exports.link = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.link); +const glob = exports.glob = (0, (_promise2 || _load_promise2()).promisify)((_glob || _load_glob()).default); +exports.unlink = (_fsNormalized || _load_fsNormalized()).unlink; + +// fs.copyFile uses the native file copying instructions on the system, performing much better +// than any JS-based solution and consumes fewer resources. Repeated testing to fine tune the +// concurrency level revealed 128 as the sweet spot on a quad-core, 16 CPU Intel system with SSD. + +const CONCURRENT_QUEUE_ITEMS = (_fs || _load_fs()).default.copyFile ? 
128 : 4; + +const fsSymlink = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.symlink); +const invariant = __webpack_require__(9); +const stripBOM = __webpack_require__(160); + +const noop = () => {}; + +function copy(src, dest, reporter) { + return copyBulk([{ src, dest }], reporter); +} + +function _readFile(loc, encoding) { + return new Promise((resolve, reject) => { + (_fs || _load_fs()).default.readFile(loc, encoding, function (err, content) { + if (err) { + reject(err); + } else { + resolve(content); + } + }); + }); +} + +function readFile(loc) { + return _readFile(loc, 'utf8').then(normalizeOS); +} + +function readFileRaw(loc) { + return _readFile(loc, 'binary'); +} + +function normalizeOS(body) { + return body.replace(/\r\n/g, '\n'); +} + +const cr = '\r'.charCodeAt(0); +const lf = '\n'.charCodeAt(0); + +/***/ }), +/* 5 */ +/***/ (function(module, exports) { + +module.exports = require("fs"); + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +class MessageError extends Error { + constructor(msg, code) { + super(msg); + this.code = code; + } + +} + +exports.MessageError = MessageError; +class ProcessSpawnError extends MessageError { + constructor(msg, code, process) { + super(msg, code); + this.process = process; + } + +} + +exports.ProcessSpawnError = ProcessSpawnError; +class SecurityError extends MessageError {} + +exports.SecurityError = SecurityError; +class ProcessTermError extends MessageError {} + +exports.ProcessTermError = ProcessTermError; +class ResponseError extends Error { + constructor(msg, responseCode) { + super(msg); + this.responseCode = responseCode; + } + +} + +exports.ResponseError = ResponseError; +class OneTimePasswordError extends Error {} +exports.OneTimePasswordError = OneTimePasswordError; + +/***/ }), +/* 7 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return Subscriber; }); +/* unused harmony export SafeSubscriber */ +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0_tslib__ = __webpack_require__(1); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__util_isFunction__ = __webpack_require__(154); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__Observer__ = __webpack_require__(420); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__Subscription__ = __webpack_require__(25); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__internal_symbol_rxSubscriber__ = __webpack_require__(321); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5__config__ = __webpack_require__(185); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_6__util_hostReportError__ = __webpack_require__(323); +/** PURE_IMPORTS_START tslib,_util_isFunction,_Observer,_Subscription,_internal_symbol_rxSubscriber,_config,_util_hostReportError PURE_IMPORTS_END */ + + + + + + + +var Subscriber = /*@__PURE__*/ (function (_super) { + __WEBPACK_IMPORTED_MODULE_0_tslib__["a" /* __extends */](Subscriber, _super); + function Subscriber(destinationOrNext, error, complete) { + var _this = _super.call(this) || this; + _this.syncErrorValue = null; + _this.syncErrorThrown = false; + _this.syncErrorThrowable = false; + _this.isStopped = false; + _this._parentSubscription = null; + switch (arguments.length) { + case 0: + _this.destination = __WEBPACK_IMPORTED_MODULE_2__Observer__["a" /* empty */]; + break; + 
case 1: + if (!destinationOrNext) { + _this.destination = __WEBPACK_IMPORTED_MODULE_2__Observer__["a" /* empty */]; + break; + } + if (typeof destinationOrNext === 'object') { + if (destinationOrNext instanceof Subscriber) { + _this.syncErrorThrowable = destinationOrNext.syncErrorThrowable; + _this.destination = destinationOrNext; + destinationOrNext.add(_this); + } + else { + _this.syncErrorThrowable = true; + _this.destination = new SafeSubscriber(_this, destinationOrNext); + } + break; + } + default: + _this.syncErrorThrowable = true; + _this.destination = new SafeSubscriber(_this, destinationOrNext, error, complete); + break; + } + return _this; + } + Subscriber.prototype[__WEBPACK_IMPORTED_MODULE_4__internal_symbol_rxSubscriber__["a" /* rxSubscriber */]] = function () { return this; }; + Subscriber.create = function (next, error, complete) { + var subscriber = new Subscriber(next, error, complete); + subscriber.syncErrorThrowable = false; + return subscriber; + }; + Subscriber.prototype.next = function (value) { + if (!this.isStopped) { + this._next(value); + } + }; + Subscriber.prototype.error = function (err) { + if (!this.isStopped) { + this.isStopped = true; + this._error(err); + } + }; + Subscriber.prototype.complete = function () { + if (!this.isStopped) { + this.isStopped = true; + this._complete(); + } + }; + Subscriber.prototype.unsubscribe = function () { + if (this.closed) { + return; + } + this.isStopped = true; + _super.prototype.unsubscribe.call(this); + }; + Subscriber.prototype._next = function (value) { + this.destination.next(value); + }; + Subscriber.prototype._error = function (err) { + this.destination.error(err); + this.unsubscribe(); + }; + Subscriber.prototype._complete = function () { + this.destination.complete(); + this.unsubscribe(); + }; + Subscriber.prototype._unsubscribeAndRecycle = function () { + var _a = this, _parent = _a._parent, _parents = _a._parents; + this._parent = null; + this._parents = null; + this.unsubscribe(); + this.closed = false; + this.isStopped = false; + this._parent = _parent; + this._parents = _parents; + this._parentSubscription = null; + return this; + }; + return Subscriber; +}(__WEBPACK_IMPORTED_MODULE_3__Subscription__["a" /* Subscription */])); + +var SafeSubscriber = /*@__PURE__*/ (function (_super) { + __WEBPACK_IMPORTED_MODULE_0_tslib__["a" /* __extends */](SafeSubscriber, _super); + function SafeSubscriber(_parentSubscriber, observerOrNext, error, complete) { + var _this = _super.call(this) || this; + _this._parentSubscriber = _parentSubscriber; + var next; + var context = _this; + if (__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__util_isFunction__["a" /* isFunction */])(observerOrNext)) { + next = observerOrNext; + } + else if (observerOrNext) { + next = observerOrNext.next; + error = observerOrNext.error; + complete = observerOrNext.complete; + if (observerOrNext !== __WEBPACK_IMPORTED_MODULE_2__Observer__["a" /* empty */]) { + context = Object.create(observerOrNext); + if (__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__util_isFunction__["a" /* isFunction */])(context.unsubscribe)) { + _this.add(context.unsubscribe.bind(context)); + } + context.unsubscribe = _this.unsubscribe.bind(_this); + } + } + _this._context = context; + _this._next = next; + _this._error = error; + _this._complete = complete; + return _this; + } + SafeSubscriber.prototype.next = function (value) { + if (!this.isStopped && this._next) { + var _parentSubscriber = this._parentSubscriber; + if (!__WEBPACK_IMPORTED_MODULE_5__config__["a" /* 
config */].useDeprecatedSynchronousErrorHandling || !_parentSubscriber.syncErrorThrowable) { + this.__tryOrUnsub(this._next, value); + } + else if (this.__tryOrSetError(_parentSubscriber, this._next, value)) { + this.unsubscribe(); + } + } + }; + SafeSubscriber.prototype.error = function (err) { + if (!this.isStopped) { + var _parentSubscriber = this._parentSubscriber; + var useDeprecatedSynchronousErrorHandling = __WEBPACK_IMPORTED_MODULE_5__config__["a" /* config */].useDeprecatedSynchronousErrorHandling; + if (this._error) { + if (!useDeprecatedSynchronousErrorHandling || !_parentSubscriber.syncErrorThrowable) { + this.__tryOrUnsub(this._error, err); + this.unsubscribe(); + } + else { + this.__tryOrSetError(_parentSubscriber, this._error, err); + this.unsubscribe(); + } + } + else if (!_parentSubscriber.syncErrorThrowable) { + this.unsubscribe(); + if (useDeprecatedSynchronousErrorHandling) { + throw err; + } + __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_6__util_hostReportError__["a" /* hostReportError */])(err); + } + else { + if (useDeprecatedSynchronousErrorHandling) { + _parentSubscriber.syncErrorValue = err; + _parentSubscriber.syncErrorThrown = true; + } + else { + __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_6__util_hostReportError__["a" /* hostReportError */])(err); + } + this.unsubscribe(); + } + } + }; + SafeSubscriber.prototype.complete = function () { + var _this = this; + if (!this.isStopped) { + var _parentSubscriber = this._parentSubscriber; + if (this._complete) { + var wrappedComplete = function () { return _this._complete.call(_this._context); }; + if (!__WEBPACK_IMPORTED_MODULE_5__config__["a" /* config */].useDeprecatedSynchronousErrorHandling || !_parentSubscriber.syncErrorThrowable) { + this.__tryOrUnsub(wrappedComplete); + this.unsubscribe(); + } + else { + this.__tryOrSetError(_parentSubscriber, wrappedComplete); + this.unsubscribe(); + } + } + else { + this.unsubscribe(); + } + } + }; + SafeSubscriber.prototype.__tryOrUnsub = function (fn, value) { + try { + fn.call(this._context, value); + } + catch (err) { + this.unsubscribe(); + if (__WEBPACK_IMPORTED_MODULE_5__config__["a" /* config */].useDeprecatedSynchronousErrorHandling) { + throw err; + } + else { + __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_6__util_hostReportError__["a" /* hostReportError */])(err); + } + } + }; + SafeSubscriber.prototype.__tryOrSetError = function (parent, fn, value) { + if (!__WEBPACK_IMPORTED_MODULE_5__config__["a" /* config */].useDeprecatedSynchronousErrorHandling) { + throw new Error('bad call'); + } + try { + fn.call(this._context, value); + } + catch (err) { + if (__WEBPACK_IMPORTED_MODULE_5__config__["a" /* config */].useDeprecatedSynchronousErrorHandling) { + parent.syncErrorValue = err; + parent.syncErrorThrown = true; + return true; + } + else { + __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_6__util_hostReportError__["a" /* hostReportError */])(err); + return true; + } + } + return false; + }; + SafeSubscriber.prototype._unsubscribe = function () { + var _parentSubscriber = this._parentSubscriber; + this._context = null; + this._parentSubscriber = null; + _parentSubscriber.unsubscribe(); + }; + return SafeSubscriber; +}(Subscriber)); + +//# sourceMappingURL=Subscriber.js.map + + +/***/ }), +/* 8 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.getPathKey = getPathKey; +const os = __webpack_require__(49); +const path = __webpack_require__(0); +const 
userHome = __webpack_require__(67).default; + +var _require = __webpack_require__(225); + +const getCacheDir = _require.getCacheDir, + getConfigDir = _require.getConfigDir, + getDataDir = _require.getDataDir; + +const isWebpackBundle = __webpack_require__(278); + +const DEPENDENCY_TYPES = exports.DEPENDENCY_TYPES = ['devDependencies', 'dependencies', 'optionalDependencies', 'peerDependencies']; +const OWNED_DEPENDENCY_TYPES = exports.OWNED_DEPENDENCY_TYPES = ['devDependencies', 'dependencies', 'optionalDependencies']; + +const RESOLUTIONS = exports.RESOLUTIONS = 'resolutions'; +const MANIFEST_FIELDS = exports.MANIFEST_FIELDS = [RESOLUTIONS, ...DEPENDENCY_TYPES]; + +const SUPPORTED_NODE_VERSIONS = exports.SUPPORTED_NODE_VERSIONS = '^4.8.0 || ^5.7.0 || ^6.2.2 || >=8.0.0'; + +const YARN_REGISTRY = exports.YARN_REGISTRY = 'https://registry.yarnpkg.com'; +const NPM_REGISTRY_RE = exports.NPM_REGISTRY_RE = /https?:\/\/registry\.npmjs\.org/g; + +const YARN_DOCS = exports.YARN_DOCS = 'https://yarnpkg.com/en/docs/cli/'; +const YARN_INSTALLER_SH = exports.YARN_INSTALLER_SH = 'https://yarnpkg.com/install.sh'; +const YARN_INSTALLER_MSI = exports.YARN_INSTALLER_MSI = 'https://yarnpkg.com/latest.msi'; + +const SELF_UPDATE_VERSION_URL = exports.SELF_UPDATE_VERSION_URL = 'https://yarnpkg.com/latest-version'; + +// cache version, bump whenever we make backwards incompatible changes +const CACHE_VERSION = exports.CACHE_VERSION = 6; + +// lockfile version, bump whenever we make backwards incompatible changes +const LOCKFILE_VERSION = exports.LOCKFILE_VERSION = 1; + +// max amount of network requests to perform concurrently +const NETWORK_CONCURRENCY = exports.NETWORK_CONCURRENCY = 8; + +// HTTP timeout used when downloading packages +const NETWORK_TIMEOUT = exports.NETWORK_TIMEOUT = 30 * 1000; // in milliseconds + +// max amount of child processes to execute concurrently +const CHILD_CONCURRENCY = exports.CHILD_CONCURRENCY = 5; + +const REQUIRED_PACKAGE_KEYS = exports.REQUIRED_PACKAGE_KEYS = ['name', 'version', '_uid']; + +function getPreferredCacheDirectories() { + const preferredCacheDirectories = [getCacheDir()]; + + if (process.getuid) { + // $FlowFixMe: process.getuid exists, dammit + preferredCacheDirectories.push(path.join(os.tmpdir(), `.yarn-cache-${process.getuid()}`)); + } + + preferredCacheDirectories.push(path.join(os.tmpdir(), `.yarn-cache`)); + + return preferredCacheDirectories; +} + +const PREFERRED_MODULE_CACHE_DIRECTORIES = exports.PREFERRED_MODULE_CACHE_DIRECTORIES = getPreferredCacheDirectories(); +const CONFIG_DIRECTORY = exports.CONFIG_DIRECTORY = getConfigDir(); +const DATA_DIRECTORY = exports.DATA_DIRECTORY = getDataDir(); +const LINK_REGISTRY_DIRECTORY = exports.LINK_REGISTRY_DIRECTORY = path.join(DATA_DIRECTORY, 'link'); +const GLOBAL_MODULE_DIRECTORY = exports.GLOBAL_MODULE_DIRECTORY = path.join(DATA_DIRECTORY, 'global'); + +const NODE_BIN_PATH = exports.NODE_BIN_PATH = process.execPath; +const YARN_BIN_PATH = exports.YARN_BIN_PATH = getYarnBinPath(); + +// Webpack needs to be configured with node.__dirname/__filename = false +function getYarnBinPath() { + if (isWebpackBundle) { + return __filename; + } else { + return path.join(__dirname, '..', 'bin', 'yarn.js'); + } +} + +const NODE_MODULES_FOLDER = exports.NODE_MODULES_FOLDER = 'node_modules'; +const NODE_PACKAGE_JSON = exports.NODE_PACKAGE_JSON = 'package.json'; + +const PNP_FILENAME = exports.PNP_FILENAME = '.pnp.js'; + +const POSIX_GLOBAL_PREFIX = exports.POSIX_GLOBAL_PREFIX = `${process.env.DESTDIR || ''}/usr/local`; +const 
FALLBACK_GLOBAL_PREFIX = exports.FALLBACK_GLOBAL_PREFIX = path.join(userHome, '.yarn'); + +const META_FOLDER = exports.META_FOLDER = '.yarn-meta'; +const INTEGRITY_FILENAME = exports.INTEGRITY_FILENAME = '.yarn-integrity'; +const LOCKFILE_FILENAME = exports.LOCKFILE_FILENAME = 'yarn.lock'; +const METADATA_FILENAME = exports.METADATA_FILENAME = '.yarn-metadata.json'; +const TARBALL_FILENAME = exports.TARBALL_FILENAME = '.yarn-tarball.tgz'; +const CLEAN_FILENAME = exports.CLEAN_FILENAME = '.yarnclean'; + +const NPM_LOCK_FILENAME = exports.NPM_LOCK_FILENAME = 'package-lock.json'; +const NPM_SHRINKWRAP_FILENAME = exports.NPM_SHRINKWRAP_FILENAME = 'npm-shrinkwrap.json'; + +const DEFAULT_INDENT = exports.DEFAULT_INDENT = ' '; +const SINGLE_INSTANCE_PORT = exports.SINGLE_INSTANCE_PORT = 31997; +const SINGLE_INSTANCE_FILENAME = exports.SINGLE_INSTANCE_FILENAME = '.yarn-single-instance'; + +const ENV_PATH_KEY = exports.ENV_PATH_KEY = getPathKey(process.platform, process.env); + +function getPathKey(platform, env) { + let pathKey = 'PATH'; + + // windows calls its path "Path" usually, but this is not guaranteed. + if (platform === 'win32') { + pathKey = 'Path'; + + for (const key in env) { + if (key.toLowerCase() === 'path') { + pathKey = key; + } + } + } + + return pathKey; +} + +const VERSION_COLOR_SCHEME = exports.VERSION_COLOR_SCHEME = { + major: 'red', + premajor: 'red', + minor: 'yellow', + preminor: 'yellow', + patch: 'green', + prepatch: 'green', + prerelease: 'red', + unchanged: 'white', + unknown: 'red' +}; + +/***/ }), +/* 9 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/** + * Copyright (c) 2013-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + + +/** + * Use invariant() to assert state which your program assumes to be true. + * + * Provide sprintf-style format (only %s is supported) and arguments + * to provide information about what broke and what you were + * expecting. + * + * The invariant message will be stripped in production, but the invariant + * will remain to ensure logic does not differ in production. + */ + +var NODE_ENV = process.env.NODE_ENV; + +var invariant = function(condition, format, a, b, c, d, e, f) { + if (NODE_ENV !== 'production') { + if (format === undefined) { + throw new Error('invariant requires an error message argument'); + } + } + + if (!condition) { + var error; + if (format === undefined) { + error = new Error( + 'Minified exception occurred; use the non-minified dev environment ' + + 'for the full error message and additional helpful warnings.' 
+ ); + } else { + var args = [a, b, c, d, e, f]; + var argIndex = 0; + error = new Error( + format.replace(/%s/g, function() { return args[argIndex++]; }) + ); + error.name = 'Invariant Violation'; + } + + error.framesToPop = 1; // we don't care about invariant's own frame + throw error; + } +}; + +module.exports = invariant; + + +/***/ }), +/* 10 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +var YAMLException = __webpack_require__(54); + +var TYPE_CONSTRUCTOR_OPTIONS = [ + 'kind', + 'resolve', + 'construct', + 'instanceOf', + 'predicate', + 'represent', + 'defaultStyle', + 'styleAliases' +]; + +var YAML_NODE_KINDS = [ + 'scalar', + 'sequence', + 'mapping' +]; + +function compileStyleAliases(map) { + var result = {}; + + if (map !== null) { + Object.keys(map).forEach(function (style) { + map[style].forEach(function (alias) { + result[String(alias)] = style; + }); + }); + } + + return result; +} + +function Type(tag, options) { + options = options || {}; + + Object.keys(options).forEach(function (name) { + if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { + throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + } + }); + + // TODO: Add tag format check. + this.tag = tag; + this.kind = options['kind'] || null; + this.resolve = options['resolve'] || function () { return true; }; + this.construct = options['construct'] || function (data) { return data; }; + this.instanceOf = options['instanceOf'] || null; + this.predicate = options['predicate'] || null; + this.represent = options['represent'] || null; + this.defaultStyle = options['defaultStyle'] || null; + this.styleAliases = compileStyleAliases(options['styleAliases'] || null); + + if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { + throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); + } +} + +module.exports = Type; + + +/***/ }), +/* 11 */ +/***/ (function(module, exports) { + +module.exports = require("crypto"); + +/***/ }), +/* 12 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return Observable; }); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__util_canReportError__ = __webpack_require__(322); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__util_toSubscriber__ = __webpack_require__(932); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__internal_symbol_observable__ = __webpack_require__(117); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__util_pipe__ = __webpack_require__(324); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__config__ = __webpack_require__(185); +/** PURE_IMPORTS_START _util_canReportError,_util_toSubscriber,_internal_symbol_observable,_util_pipe,_config PURE_IMPORTS_END */ + + + + + +var Observable = /*@__PURE__*/ (function () { + function Observable(subscribe) { + this._isScalar = false; + if (subscribe) { + this._subscribe = subscribe; + } + } + Observable.prototype.lift = function (operator) { + var observable = new Observable(); + observable.source = this; + observable.operator = operator; + return observable; + }; + Observable.prototype.subscribe = function (observerOrNext, error, complete) { + var operator = this.operator; + var sink = __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__util_toSubscriber__["a" /* toSubscriber */])(observerOrNext, error, complete); + if (operator) { + operator.call(sink, 
this.source); + } + else { + sink.add(this.source || (__WEBPACK_IMPORTED_MODULE_4__config__["a" /* config */].useDeprecatedSynchronousErrorHandling && !sink.syncErrorThrowable) ? + this._subscribe(sink) : + this._trySubscribe(sink)); + } + if (__WEBPACK_IMPORTED_MODULE_4__config__["a" /* config */].useDeprecatedSynchronousErrorHandling) { + if (sink.syncErrorThrowable) { + sink.syncErrorThrowable = false; + if (sink.syncErrorThrown) { + throw sink.syncErrorValue; + } + } + } + return sink; + }; + Observable.prototype._trySubscribe = function (sink) { + try { + return this._subscribe(sink); + } + catch (err) { + if (__WEBPACK_IMPORTED_MODULE_4__config__["a" /* config */].useDeprecatedSynchronousErrorHandling) { + sink.syncErrorThrown = true; + sink.syncErrorValue = err; + } + if (__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__util_canReportError__["a" /* canReportError */])(sink)) { + sink.error(err); + } + else { + console.warn(err); + } + } + }; + Observable.prototype.forEach = function (next, promiseCtor) { + var _this = this; + promiseCtor = getPromiseCtor(promiseCtor); + return new promiseCtor(function (resolve, reject) { + var subscription; + subscription = _this.subscribe(function (value) { + try { + next(value); + } + catch (err) { + reject(err); + if (subscription) { + subscription.unsubscribe(); + } + } + }, reject, resolve); + }); + }; + Observable.prototype._subscribe = function (subscriber) { + var source = this.source; + return source && source.subscribe(subscriber); + }; + Observable.prototype[__WEBPACK_IMPORTED_MODULE_2__internal_symbol_observable__["a" /* observable */]] = function () { + return this; + }; + Observable.prototype.pipe = function () { + var operations = []; + for (var _i = 0; _i < arguments.length; _i++) { + operations[_i] = arguments[_i]; + } + if (operations.length === 0) { + return this; + } + return __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_3__util_pipe__["b" /* pipeFromArray */])(operations)(this); + }; + Observable.prototype.toPromise = function (promiseCtor) { + var _this = this; + promiseCtor = getPromiseCtor(promiseCtor); + return new promiseCtor(function (resolve, reject) { + var value; + _this.subscribe(function (x) { return value = x; }, function (err) { return reject(err); }, function () { return resolve(value); }); + }); + }; + Observable.create = function (subscribe) { + return new Observable(subscribe); + }; + return Observable; +}()); + +function getPromiseCtor(promiseCtor) { + if (!promiseCtor) { + promiseCtor = __WEBPACK_IMPORTED_MODULE_4__config__["a" /* config */].Promise || Promise; + } + if (!promiseCtor) { + throw new Error('no Promise impl found'); + } + return promiseCtor; +} +//# sourceMappingURL=Observable.js.map + + +/***/ }), +/* 13 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return OuterSubscriber; }); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0_tslib__ = __webpack_require__(1); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__Subscriber__ = __webpack_require__(7); +/** PURE_IMPORTS_START tslib,_Subscriber PURE_IMPORTS_END */ + + +var OuterSubscriber = /*@__PURE__*/ (function (_super) { + __WEBPACK_IMPORTED_MODULE_0_tslib__["a" /* __extends */](OuterSubscriber, _super); + function OuterSubscriber() { + return _super !== null && _super.apply(this, arguments) || this; + } + OuterSubscriber.prototype.notifyNext = function (outerValue, innerValue, outerIndex, innerIndex, 
innerSub) { + this.destination.next(innerValue); + }; + OuterSubscriber.prototype.notifyError = function (error, innerSub) { + this.destination.error(error); + }; + OuterSubscriber.prototype.notifyComplete = function (innerSub) { + this.destination.complete(); + }; + return OuterSubscriber; +}(__WEBPACK_IMPORTED_MODULE_1__Subscriber__["a" /* Subscriber */])); + +//# sourceMappingURL=OuterSubscriber.js.map + + +/***/ }), +/* 14 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (immutable) */ __webpack_exports__["a"] = subscribeToResult; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__InnerSubscriber__ = __webpack_require__(84); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__subscribeTo__ = __webpack_require__(446); +/** PURE_IMPORTS_START _InnerSubscriber,_subscribeTo PURE_IMPORTS_END */ + + +function subscribeToResult(outerSubscriber, result, outerValue, outerIndex, destination) { + if (destination === void 0) { + destination = new __WEBPACK_IMPORTED_MODULE_0__InnerSubscriber__["a" /* InnerSubscriber */](outerSubscriber, outerValue, outerIndex); + } + if (destination.closed) { + return; + } + return __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__subscribeTo__["a" /* subscribeTo */])(result)(destination); +} +//# sourceMappingURL=subscribeToResult.js.map + + +/***/ }), +/* 15 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* eslint-disable node/no-deprecated-api */ + + + +var buffer = __webpack_require__(64) +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer + + +/***/ }), +/* 16 */ +/***/ (function(module, exports, __webpack_require__) { + +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. + +var assert = __webpack_require__(28); +var Stream = __webpack_require__(23).Stream; +var util = __webpack_require__(3); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? 
+} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = 
_setExports(process.env.NODE_NDEBUG); + + +/***/ }), +/* 17 */ +/***/ (function(module, exports) { + +// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028 +var global = module.exports = typeof window != 'undefined' && window.Math == Math + ? window : typeof self != 'undefined' && self.Math == Math ? self + // eslint-disable-next-line no-new-func + : Function('return this')(); +if (typeof __g == 'number') __g = global; // eslint-disable-line no-undef + + +/***/ }), +/* 18 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.sortAlpha = sortAlpha; +exports.sortOptionsByFlags = sortOptionsByFlags; +exports.entries = entries; +exports.removePrefix = removePrefix; +exports.removeSuffix = removeSuffix; +exports.addSuffix = addSuffix; +exports.hyphenate = hyphenate; +exports.camelCase = camelCase; +exports.compareSortedArrays = compareSortedArrays; +exports.sleep = sleep; +const _camelCase = __webpack_require__(230); + +function sortAlpha(a, b) { + // sort alphabetically in a deterministic way + const shortLen = Math.min(a.length, b.length); + for (let i = 0; i < shortLen; i++) { + const aChar = a.charCodeAt(i); + const bChar = b.charCodeAt(i); + if (aChar !== bChar) { + return aChar - bChar; + } + } + return a.length - b.length; +} + +function sortOptionsByFlags(a, b) { + const aOpt = a.flags.replace(/-/g, ''); + const bOpt = b.flags.replace(/-/g, ''); + return sortAlpha(aOpt, bOpt); +} + +function entries(obj) { + const entries = []; + if (obj) { + for (const key in obj) { + entries.push([key, obj[key]]); + } + } + return entries; +} + +function removePrefix(pattern, prefix) { + if (pattern.startsWith(prefix)) { + pattern = pattern.slice(prefix.length); + } + + return pattern; +} + +function removeSuffix(pattern, suffix) { + if (pattern.endsWith(suffix)) { + return pattern.slice(0, -suffix.length); + } + + return pattern; +} + +function addSuffix(pattern, suffix) { + if (!pattern.endsWith(suffix)) { + return pattern + suffix; + } + + return pattern; +} + +function hyphenate(str) { + return str.replace(/[A-Z]/g, match => { + return '-' + match.charAt(0).toLowerCase(); + }); +} + +function camelCase(str) { + if (/[A-Z]/.test(str)) { + return null; + } else { + return _camelCase(str); + } +} + +function compareSortedArrays(array1, array2) { + if (array1.length !== array2.length) { + return false; + } + for (let i = 0, len = array1.length; i < len; i++) { + if (array1[i] !== array2[i]) { + return false; + } + } + return true; +} + +function sleep(ms) { + return new Promise(resolve => { + setTimeout(resolve, ms); + }); +} + +/***/ }), +/* 19 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.stringify = exports.parse = undefined; + +var _asyncToGenerator2; + +function _load_asyncToGenerator() { + return _asyncToGenerator2 = _interopRequireDefault(__webpack_require__(2)); +} + +var _parse; + +function _load_parse() { + return _parse = __webpack_require__(105); +} + +Object.defineProperty(exports, 'parse', { + enumerable: true, + get: function get() { + return _interopRequireDefault(_parse || _load_parse()).default; + } +}); + +var _stringify; + +function _load_stringify() { + return _stringify = __webpack_require__(199); +} + +Object.defineProperty(exports, 'stringify', { + enumerable: true, + get: function get() { + return _interopRequireDefault(_stringify || 
_load_stringify()).default; + } +}); +exports.implodeEntry = implodeEntry; +exports.explodeEntry = explodeEntry; + +var _misc; + +function _load_misc() { + return _misc = __webpack_require__(18); +} + +var _normalizePattern; + +function _load_normalizePattern() { + return _normalizePattern = __webpack_require__(37); +} + +var _parse2; + +function _load_parse2() { + return _parse2 = _interopRequireDefault(__webpack_require__(105)); +} + +var _constants; + +function _load_constants() { + return _constants = __webpack_require__(8); +} + +var _fs; + +function _load_fs() { + return _fs = _interopRequireWildcard(__webpack_require__(4)); +} + +function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const invariant = __webpack_require__(9); + +const path = __webpack_require__(0); +const ssri = __webpack_require__(65); + +function getName(pattern) { + return (0, (_normalizePattern || _load_normalizePattern()).normalizePattern)(pattern).name; +} + +function blankObjectUndefined(obj) { + return obj && Object.keys(obj).length ? obj : undefined; +} + +function keyForRemote(remote) { + return remote.resolved || (remote.reference && remote.hash ? `${remote.reference}#${remote.hash}` : null); +} + +function serializeIntegrity(integrity) { + // We need this because `Integrity.toString()` does not use sorting to ensure a stable string output + // See https://git.io/vx2Hy + return integrity.toString().split(' ').sort().join(' '); +} + +function implodeEntry(pattern, obj) { + const inferredName = getName(pattern); + const integrity = obj.integrity ? serializeIntegrity(obj.integrity) : ''; + const imploded = { + name: inferredName === obj.name ? undefined : obj.name, + version: obj.version, + uid: obj.uid === obj.version ? undefined : obj.uid, + resolved: obj.resolved, + registry: obj.registry === 'npm' ? 
undefined : obj.registry, + dependencies: blankObjectUndefined(obj.dependencies), + optionalDependencies: blankObjectUndefined(obj.optionalDependencies), + permissions: blankObjectUndefined(obj.permissions), + prebuiltVariants: blankObjectUndefined(obj.prebuiltVariants) + }; + if (integrity) { + imploded.integrity = integrity; + } + return imploded; +} + +function explodeEntry(pattern, obj) { + obj.optionalDependencies = obj.optionalDependencies || {}; + obj.dependencies = obj.dependencies || {}; + obj.uid = obj.uid || obj.version; + obj.permissions = obj.permissions || {}; + obj.registry = obj.registry || 'npm'; + obj.name = obj.name || getName(pattern); + const integrity = obj.integrity; + if (integrity && integrity.isIntegrity) { + obj.integrity = ssri.parse(integrity); + } + return obj; +} + +class Lockfile { + constructor({ cache, source, parseResultType } = {}) { + this.source = source || ''; + this.cache = cache; + this.parseResultType = parseResultType; + } + + // source string if the `cache` was parsed + + + // if true, we're parsing an old yarn file and need to update integrity fields + hasEntriesExistWithoutIntegrity() { + if (!this.cache) { + return false; + } + + for (const key in this.cache) { + // $FlowFixMe - `this.cache` is clearly defined at this point + if (!/^.*@(file:|http)/.test(key) && this.cache[key] && !this.cache[key].integrity) { + return true; + } + } + + return false; + } + + static fromDirectory(dir, reporter) { + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + // read the manifest in this directory + const lockfileLoc = path.join(dir, (_constants || _load_constants()).LOCKFILE_FILENAME); + + let lockfile; + let rawLockfile = ''; + let parseResult; + + if (yield (_fs || _load_fs()).exists(lockfileLoc)) { + rawLockfile = yield (_fs || _load_fs()).readFile(lockfileLoc); + parseResult = (0, (_parse2 || _load_parse2()).default)(rawLockfile, lockfileLoc); + + if (reporter) { + if (parseResult.type === 'merge') { + reporter.info(reporter.lang('lockfileMerged')); + } else if (parseResult.type === 'conflict') { + reporter.warn(reporter.lang('lockfileConflict')); + } + } + + lockfile = parseResult.object; + } else if (reporter) { + reporter.info(reporter.lang('noLockfileFound')); + } + + if (lockfile && lockfile.__metadata) { + const lockfilev2 = lockfile; + lockfile = {}; + } + + return new Lockfile({ cache: lockfile, source: rawLockfile, parseResultType: parseResult && parseResult.type }); + })(); + } + + getLocked(pattern) { + const cache = this.cache; + if (!cache) { + return undefined; + } + + const shrunk = pattern in cache && cache[pattern]; + + if (typeof shrunk === 'string') { + return this.getLocked(shrunk); + } else if (shrunk) { + explodeEntry(pattern, shrunk); + return shrunk; + } + + return undefined; + } + + removePattern(pattern) { + const cache = this.cache; + if (!cache) { + return; + } + delete cache[pattern]; + } + + getLockfile(patterns) { + const lockfile = {}; + const seen = new Map(); + + // order by name so that lockfile manifest is assigned to the first dependency with this manifest + // the others that have the same remoteKey will just refer to the first + // ordering allows for consistency in lockfile when it is serialized + const sortedPatternsKeys = Object.keys(patterns).sort((_misc || _load_misc()).sortAlpha); + + for (var _iterator = sortedPatternsKeys, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? 
_iterator : _iterator[Symbol.iterator]();;) { + var _ref; + + if (_isArray) { + if (_i >= _iterator.length) break; + _ref = _iterator[_i++]; + } else { + _i = _iterator.next(); + if (_i.done) break; + _ref = _i.value; + } + + const pattern = _ref; + + const pkg = patterns[pattern]; + const remote = pkg._remote, + ref = pkg._reference; + + invariant(ref, 'Package is missing a reference'); + invariant(remote, 'Package is missing a remote'); + + const remoteKey = keyForRemote(remote); + const seenPattern = remoteKey && seen.get(remoteKey); + if (seenPattern) { + // no point in duplicating it + lockfile[pattern] = seenPattern; + + // if we're relying on our name being inferred and two of the patterns have + // different inferred names then we need to set it + if (!seenPattern.name && getName(pattern) !== pkg.name) { + seenPattern.name = pkg.name; + } + continue; + } + const obj = implodeEntry(pattern, { + name: pkg.name, + version: pkg.version, + uid: pkg._uid, + resolved: remote.resolved, + integrity: remote.integrity, + registry: remote.registry, + dependencies: pkg.dependencies, + peerDependencies: pkg.peerDependencies, + optionalDependencies: pkg.optionalDependencies, + permissions: ref.permissions, + prebuiltVariants: pkg.prebuiltVariants + }); + + lockfile[pattern] = obj; + + if (remoteKey) { + seen.set(remoteKey, obj); + } + } + + return lockfile; + } +} +exports.default = Lockfile; + +/***/ }), +/* 20 */ +/***/ (function(module, exports, __webpack_require__) { + +var store = __webpack_require__(133)('wks'); +var uid = __webpack_require__(137); +var Symbol = __webpack_require__(17).Symbol; +var USE_SYMBOL = typeof Symbol == 'function'; + +var $exports = module.exports = function (name) { + return store[name] || (store[name] = + USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)('Symbol.' + name)); +}; + +$exports.store = store; + + +/***/ }), +/* 21 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +exports.__esModule = true; + +var _assign = __webpack_require__(591); + +var _assign2 = _interopRequireDefault(_assign); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +exports.default = _assign2.default || function (target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i]; + + for (var key in source) { + if (Object.prototype.hasOwnProperty.call(source, key)) { + target[key] = source[key]; + } + } + } + + return target; +}; + +/***/ }), +/* 22 */ +/***/ (function(module, exports) { + +exports = module.exports = SemVer; + +// The debug function is excluded entirely from the minified version. +/* nomin */ var debug; +/* nomin */ if (typeof process === 'object' && + /* nomin */ process.env && + /* nomin */ process.env.NODE_DEBUG && + /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG)) + /* nomin */ debug = function() { + /* nomin */ var args = Array.prototype.slice.call(arguments, 0); + /* nomin */ args.unshift('SEMVER'); + /* nomin */ console.log.apply(console, args); + /* nomin */ }; +/* nomin */ else + /* nomin */ debug = function() {}; + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0'; + +var MAX_LENGTH = 256; +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991; + +// Max safe segment length for coercion. 
+var MAX_SAFE_COMPONENT_LENGTH = 16; + +// The actual regexps go on exports.re +var re = exports.re = []; +var src = exports.src = []; +var R = 0; + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++; +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; +var NUMERICIDENTIFIERLOOSE = R++; +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; + + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++; +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; + + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++; +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')'; + +var MAINVERSIONLOOSE = R++; +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +var PRERELEASEIDENTIFIER = R++; +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + +var PRERELEASEIDENTIFIERLOOSE = R++; +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++; +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; + +var PRERELEASELOOSE = R++; +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++; +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++; +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; + + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++; +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?'; + +src[FULL] = '^' + FULLPLAIN + '$'; + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?'; + +var LOOSE = R++; +src[LOOSE] = '^' + LOOSEPLAIN + '$'; + +var GTLT = R++; +src[GTLT] = '((?:<|>)?=?)'; + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. 
+var XRANGEIDENTIFIERLOOSE = R++; +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; +var XRANGEIDENTIFIER = R++; +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; + +var XRANGEPLAIN = R++; +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGEPLAINLOOSE = R++; +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGE = R++; +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; +var XRANGELOOSE = R++; +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++; +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])'; + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++; +src[LONETILDE] = '(?:~>?)'; + +var TILDETRIM = R++; +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); +var tildeTrimReplace = '$1~'; + +var TILDE = R++; +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; +var TILDELOOSE = R++; +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++; +src[LONECARET] = '(?:\\^)'; + +var CARETTRIM = R++; +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); +var caretTrimReplace = '$1^'; + +var CARET = R++; +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; +var CARETLOOSE = R++; +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++; +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; +var COMPARATOR = R++; +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; + + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++; +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); +var comparatorTrimReplace = '$1$2$3'; + + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++; +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$'; + +var HYPHENRANGELOOSE = R++; +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$'; + +// Star ranges basically just allow anything at all. +var STAR = R++; +src[STAR] = '(<|>)?=?\\s*\\*'; + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. 
+for (var i = 0; i < R; i++) { + debug(i, src[i]); + if (!re[i]) + re[i] = new RegExp(src[i]); +} + +exports.parse = parse; +function parse(version, loose) { + if (version instanceof SemVer) + return version; + + if (typeof version !== 'string') + return null; + + if (version.length > MAX_LENGTH) + return null; + + var r = loose ? re[LOOSE] : re[FULL]; + if (!r.test(version)) + return null; + + try { + return new SemVer(version, loose); + } catch (er) { + return null; + } +} + +exports.valid = valid; +function valid(version, loose) { + var v = parse(version, loose); + return v ? v.version : null; +} + + +exports.clean = clean; +function clean(version, loose) { + var s = parse(version.trim().replace(/^[=v]+/, ''), loose); + return s ? s.version : null; +} + +exports.SemVer = SemVer; + +function SemVer(version, loose) { + if (version instanceof SemVer) { + if (version.loose === loose) + return version; + else + version = version.version; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version); + } + + if (version.length > MAX_LENGTH) + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + + if (!(this instanceof SemVer)) + return new SemVer(version, loose); + + debug('SemVer', version, loose); + this.loose = loose; + var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); + + if (!m) + throw new TypeError('Invalid Version: ' + version); + + this.raw = version; + + // these are actually numbers + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) + throw new TypeError('Invalid major version') + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) + throw new TypeError('Invalid minor version') + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) + throw new TypeError('Invalid patch version') + + // numberify any prerelease numeric ids + if (!m[4]) + this.prerelease = []; + else + this.prerelease = m[4].split('.').map(function(id) { + if (/^[0-9]+$/.test(id)) { + var num = +id; + if (num >= 0 && num < MAX_SAFE_INTEGER) + return num; + } + return id; + }); + + this.build = m[5] ? m[5].split('.') : []; + this.format(); +} + +SemVer.prototype.format = function() { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch; + if (this.prerelease.length) + this.version += '-' + this.prerelease.join('.'); + return this.version; +}; + +SemVer.prototype.toString = function() { + return this.version; +}; + +SemVer.prototype.compare = function(other) { + debug('SemVer.compare', this.version, this.loose, other); + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return this.compareMain(other) || this.comparePre(other); +}; + +SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch); +}; + +SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) + return -1; + else if (!this.prerelease.length && other.prerelease.length) + return 1; + else if (!this.prerelease.length && !other.prerelease.length) + return 0; + + var i = 0; + do { + var a = this.prerelease[i]; + var b = other.prerelease[i]; + debug('prerelease compare', i, a, b); + if (a === undefined && b === undefined) + return 0; + else if (b === undefined) + return 1; + else if (a === undefined) + return -1; + else if (a === b) + continue; + else + return compareIdentifiers(a, b); + } while (++i); +}; + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function(release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc('pre', identifier); + break; + case 'preminor': + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc('pre', identifier); + break; + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0; + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) + this.major++; + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) + this.minor++; + this.patch = 0; + this.prerelease = []; + break; + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) + this.patch++; + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) + this.prerelease = [0]; + else { + var i = this.prerelease.length; + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++; + i = -2; + } + } + if (i === -1) // didn't increment anything + this.prerelease.push(0); + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) + this.prerelease = [identifier, 0]; + } else + this.prerelease = [identifier, 0]; + } + break; + + default: + throw new Error('invalid increment argument: ' + release); + } + this.format(); + this.raw = this.version; + return this; +}; + +exports.inc = inc; +function inc(version, release, loose, identifier) { + if (typeof(loose) === 'string') { + identifier = loose; + loose = undefined; + } + + try { + return new SemVer(version, loose).inc(release, identifier).version; + } catch (er) { + return null; + } +} + +exports.diff = diff; +function diff(version1, version2) { + if (eq(version1, version2)) { + return null; + } else { + var v1 = parse(version1); + var v2 = parse(version2); + if (v1.prerelease.length || v2.prerelease.length) { + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return 'pre'+key; + } + } + } + return 'prerelease'; + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return key; + } + } + } + } +} + +exports.compareIdentifiers = compareIdentifiers; + +var numeric = /^[0-9]+$/; +function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var bnum = numeric.test(b); + + if (anum && bnum) { + a = +a; + b = +b; + } + + return (anum && !bnum) ? -1 : + (bnum && !anum) ? 1 : + a < b ? -1 : + a > b ? 
1 : + 0; +} + +exports.rcompareIdentifiers = rcompareIdentifiers; +function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); +} + +exports.major = major; +function major(a, loose) { + return new SemVer(a, loose).major; +} + +exports.minor = minor; +function minor(a, loose) { + return new SemVer(a, loose).minor; +} + +exports.patch = patch; +function patch(a, loose) { + return new SemVer(a, loose).patch; +} + +exports.compare = compare; +function compare(a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)); +} + +exports.compareLoose = compareLoose; +function compareLoose(a, b) { + return compare(a, b, true); +} + +exports.rcompare = rcompare; +function rcompare(a, b, loose) { + return compare(b, a, loose); +} + +exports.sort = sort; +function sort(list, loose) { + return list.sort(function(a, b) { + return exports.compare(a, b, loose); + }); +} + +exports.rsort = rsort; +function rsort(list, loose) { + return list.sort(function(a, b) { + return exports.rcompare(a, b, loose); + }); +} + +exports.gt = gt; +function gt(a, b, loose) { + return compare(a, b, loose) > 0; +} + +exports.lt = lt; +function lt(a, b, loose) { + return compare(a, b, loose) < 0; +} + +exports.eq = eq; +function eq(a, b, loose) { + return compare(a, b, loose) === 0; +} + +exports.neq = neq; +function neq(a, b, loose) { + return compare(a, b, loose) !== 0; +} + +exports.gte = gte; +function gte(a, b, loose) { + return compare(a, b, loose) >= 0; +} + +exports.lte = lte; +function lte(a, b, loose) { + return compare(a, b, loose) <= 0; +} + +exports.cmp = cmp; +function cmp(a, op, b, loose) { + var ret; + switch (op) { + case '===': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a === b; + break; + case '!==': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a !== b; + break; + case '': case '=': case '==': ret = eq(a, b, loose); break; + case '!=': ret = neq(a, b, loose); break; + case '>': ret = gt(a, b, loose); break; + case '>=': ret = gte(a, b, loose); break; + case '<': ret = lt(a, b, loose); break; + case '<=': ret = lte(a, b, loose); break; + default: throw new TypeError('Invalid operator: ' + op); + } + return ret; +} + +exports.Comparator = Comparator; +function Comparator(comp, loose) { + if (comp instanceof Comparator) { + if (comp.loose === loose) + return comp; + else + comp = comp.value; + } + + if (!(this instanceof Comparator)) + return new Comparator(comp, loose); + + debug('comparator', comp, loose); + this.loose = loose; + this.parse(comp); + + if (this.semver === ANY) + this.value = ''; + else + this.value = this.operator + this.semver.version; + + debug('comp', this); +} + +var ANY = {}; +Comparator.prototype.parse = function(comp) { + var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var m = comp.match(r); + + if (!m) + throw new TypeError('Invalid comparator: ' + comp); + + this.operator = m[1]; + if (this.operator === '=') + this.operator = ''; + + // if it literally is just '>' or '' then allow anything. 
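+  // e.g. new Comparator('') parses with no version part, so this.semver is
+  // set to the ANY sentinel below and test() accepts every version.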
+ if (!m[2]) + this.semver = ANY; + else + this.semver = new SemVer(m[2], this.loose); +}; + +Comparator.prototype.toString = function() { + return this.value; +}; + +Comparator.prototype.test = function(version) { + debug('Comparator.test', version, this.loose); + + if (this.semver === ANY) + return true; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + return cmp(version, this.operator, this.semver, this.loose); +}; + +Comparator.prototype.intersects = function(comp, loose) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required'); + } + + var rangeTmp; + + if (this.operator === '') { + rangeTmp = new Range(comp.value, loose); + return satisfies(this.value, rangeTmp, loose); + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, loose); + return satisfies(comp.semver, rangeTmp, loose); + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>'); + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<'); + var sameSemVer = this.semver.version === comp.semver.version; + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<='); + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, loose) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')); + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, loose) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')); + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; +}; + + +exports.Range = Range; +function Range(range, loose) { + if (range instanceof Range) { + if (range.loose === loose) { + return range; + } else { + return new Range(range.raw, loose); + } + } + + if (range instanceof Comparator) { + return new Range(range.value, loose); + } + + if (!(this instanceof Range)) + return new Range(range, loose); + + this.loose = loose; + + // First, split based on boolean or || + this.raw = range; + this.set = range.split(/\s*\|\|\s*/).map(function(range) { + return this.parseRange(range.trim()); + }, this).filter(function(c) { + // throw out any that are not relevant for whatever reason + return c.length; + }); + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range); + } + + this.format(); +} + +Range.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(' ').trim(); + }).join('||').trim(); + return this.range; +}; + +Range.prototype.toString = function() { + return this.range; +}; + +Range.prototype.parseRange = function(range) { + var loose = this.loose; + range = range.trim(); + debug('range', range, loose); + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + debug('hyphen replace', range); + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); + debug('comparator trim', range, re[COMPARATORTRIM]); + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace); + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace); + + // normalize spaces + range = range.split(/\s+/).join(' '); + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var set = range.split(' ').map(function(comp) { + return parseComparator(comp, loose); + }).join(' ').split(/\s+/); + if (this.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function(comp) { + return !!comp.match(compRe); + }); + } + set = set.map(function(comp) { + return new Comparator(comp, loose); + }); + + return set; +}; + +Range.prototype.intersects = function(range, loose) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required'); + } + + return this.set.some(function(thisComparators) { + return thisComparators.every(function(thisComparator) { + return range.set.some(function(rangeComparators) { + return rangeComparators.every(function(rangeComparator) { + return thisComparator.intersects(rangeComparator, loose); + }); + }); + }); + }); +}; + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators; +function toComparators(range, loose) { + return new Range(range, loose).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(' ').trim().split(' '); + }); +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator(comp, loose) { + debug('comp', comp); + comp = replaceCarets(comp, loose); + debug('caret', comp); + comp = replaceTildes(comp, loose); + debug('tildes', comp); + comp = replaceXRanges(comp, loose); + debug('xrange', comp); + comp = replaceStars(comp, loose); + debug('stars', comp); + return comp; +} + +function isX(id) { + return !id || id.toLowerCase() === 'x' || id === '*'; +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceTilde(comp, loose); + }).join(' '); +} + +function replaceTilde(comp, loose) { + var r = loose ? re[TILDELOOSE] : re[TILDE]; + return comp.replace(r, function(_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr); + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else if (pr) { + debug('replaceTilde pr', pr); + if (pr.charAt(0) !== '-') + pr = '-' + pr; + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' 
+ (+m + 1) + '.0'; + + debug('tilde return', ret); + return ret; + }); +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceCaret(comp, loose); + }).join(' '); +} + +function replaceCaret(comp, loose) { + debug('caret', comp, loose); + var r = loose ? re[CARETLOOSE] : re[CARET]; + return comp.replace(r, function(_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr); + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) { + if (M === '0') + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'; + } else if (pr) { + debug('replaceCaret pr', pr); + if (pr.charAt(0) !== '-') + pr = '-' + pr; + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + (+M + 1) + '.0.0'; + } else { + debug('no pr'); + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0'; + } + + debug('caret return', ret); + return ret; + }); +} + +function replaceXRanges(comp, loose) { + debug('replaceXRanges', comp, loose); + return comp.split(/\s+/).map(function(comp) { + return replaceXRange(comp, loose); + }).join(' '); +} + +function replaceXRange(comp, loose) { + comp = comp.trim(); + var r = loose ? re[XRANGELOOSE] : re[XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr); + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + + if (gtlt === '=' && anyX) + gtlt = ''; + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0'; + } else { + // nothing is forbidden + ret = '*'; + } + } else if (gtlt && anyX) { + // replace X with 0 + if (xm) + m = 0; + if (xp) + p = 0; + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>='; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else if (xp) { + m = +m + 1; + p = 0; + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<'; + if (xm) + M = +M + 1; + else + m = +m + 1; + } + + ret = gtlt + M + '.' + m + '.' + p; + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + } + + debug('xRange return', ret); + + return ret; + }); +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars(comp, loose) { + debug('replaceStars', comp, loose); + // Looseness is ignored here. star is always as loose as it gets! 
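+  // e.g. '1.2.3 *' becomes '1.2.3', and a lone '*' becomes '' (any version).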
+ return comp.trim().replace(re[STAR], ''); +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + + if (isX(fM)) + from = ''; + else if (isX(fm)) + from = '>=' + fM + '.0.0'; + else if (isX(fp)) + from = '>=' + fM + '.' + fm + '.0'; + else + from = '>=' + from; + + if (isX(tM)) + to = ''; + else if (isX(tm)) + to = '<' + (+tM + 1) + '.0.0'; + else if (isX(tp)) + to = '<' + tM + '.' + (+tm + 1) + '.0'; + else if (tpr) + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; + else + to = '<=' + to; + + return (from + ' ' + to).trim(); +} + + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function(version) { + if (!version) + return false; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version)) + return true; + } + return false; +}; + +function testSet(set, version) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) + return false; + } + + if (version.prerelease.length) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (var i = 0; i < set.length; i++) { + debug(set[i].semver); + if (set[i].semver === ANY) + continue; + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver; + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) + return true; + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false; + } + + return true; +} + +exports.satisfies = satisfies; +function satisfies(version, range, loose) { + try { + range = new Range(range, loose); + } catch (er) { + return false; + } + return range.test(version); +} + +exports.maxSatisfying = maxSatisfying; +function maxSatisfying(versions, range, loose) { + var max = null; + var maxSV = null; + try { + var rangeObj = new Range(range, loose); + } catch (er) { + return null; + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { // satisfies(v, range, loose) + if (!max || maxSV.compare(v) === -1) { // compare(max, v, true) + max = v; + maxSV = new SemVer(max, loose); + } + } + }) + return max; +} + +exports.minSatisfying = minSatisfying; +function minSatisfying(versions, range, loose) { + var min = null; + var minSV = null; + try { + var rangeObj = new Range(range, loose); + } catch (er) { + return null; + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { // satisfies(v, range, loose) + if (!min || minSV.compare(v) === 1) { // compare(min, v, true) + min = v; + minSV = new SemVer(min, loose); + } + } + }) + return min; +} + +exports.validRange = validRange; +function validRange(range, loose) { + try { + // Return '*' instead of '' so that truthiness works. 
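+    // e.g. validRange('') and validRange('*') both give '*', while
+    // validRange('not a range') gives null via the catch below.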
+ // This will throw if it's invalid anyway + return new Range(range, loose).range || '*'; + } catch (er) { + return null; + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr; +function ltr(version, range, loose) { + return outside(version, range, '<', loose); +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr; +function gtr(version, range, loose) { + return outside(version, range, '>', loose); +} + +exports.outside = outside; +function outside(version, range, hilo, loose) { + version = new SemVer(version, loose); + range = new Range(range, loose); + + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case '>': + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = '>'; + ecomp = '>='; + break; + case '<': + gtfn = lt; + ltefn = gte; + ltfn = gt; + comp = '<'; + ecomp = '<='; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, loose)) { + return false; + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i]; + + var high = null; + var low = null; + + comparators.forEach(function(comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, loose)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, loose)) { + low = comparator; + } + }); + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false; + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false; + } + } + return true; +} + +exports.prerelease = prerelease; +function prerelease(version, loose) { + var parsed = parse(version, loose); + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null; +} + +exports.intersects = intersects; +function intersects(r1, r2, loose) { + r1 = new Range(r1, loose) + r2 = new Range(r2, loose) + return r1.intersects(r2) +} + +exports.coerce = coerce; +function coerce(version) { + if (version instanceof SemVer) + return version; + + if (typeof version !== 'string') + return null; + + var match = version.match(re[COERCE]); + + if (match == null) + return null; + + return parse((match[1] || '0') + '.' + (match[2] || '0') + '.' 
+ (match[3] || '0')); +} + + +/***/ }), +/* 23 */ +/***/ (function(module, exports) { + +module.exports = require("stream"); + +/***/ }), +/* 24 */ +/***/ (function(module, exports) { + +module.exports = require("url"); + +/***/ }), +/* 25 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return Subscription; }); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__util_isArray__ = __webpack_require__(41); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__util_isObject__ = __webpack_require__(444); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__util_isFunction__ = __webpack_require__(154); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__util_tryCatch__ = __webpack_require__(56); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__util_errorObject__ = __webpack_require__(47); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5__util_UnsubscriptionError__ = __webpack_require__(441); +/** PURE_IMPORTS_START _util_isArray,_util_isObject,_util_isFunction,_util_tryCatch,_util_errorObject,_util_UnsubscriptionError PURE_IMPORTS_END */ + + + + + + +var Subscription = /*@__PURE__*/ (function () { + function Subscription(unsubscribe) { + this.closed = false; + this._parent = null; + this._parents = null; + this._subscriptions = null; + if (unsubscribe) { + this._unsubscribe = unsubscribe; + } + } + Subscription.prototype.unsubscribe = function () { + var hasErrors = false; + var errors; + if (this.closed) { + return; + } + var _a = this, _parent = _a._parent, _parents = _a._parents, _unsubscribe = _a._unsubscribe, _subscriptions = _a._subscriptions; + this.closed = true; + this._parent = null; + this._parents = null; + this._subscriptions = null; + var index = -1; + var len = _parents ? _parents.length : 0; + while (_parent) { + _parent.remove(this); + _parent = ++index < len && _parents[index] || null; + } + if (__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_2__util_isFunction__["a" /* isFunction */])(_unsubscribe)) { + var trial = __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_3__util_tryCatch__["a" /* tryCatch */])(_unsubscribe).call(this); + if (trial === __WEBPACK_IMPORTED_MODULE_4__util_errorObject__["a" /* errorObject */]) { + hasErrors = true; + errors = errors || (__WEBPACK_IMPORTED_MODULE_4__util_errorObject__["a" /* errorObject */].e instanceof __WEBPACK_IMPORTED_MODULE_5__util_UnsubscriptionError__["a" /* UnsubscriptionError */] ? 
+ flattenUnsubscriptionErrors(__WEBPACK_IMPORTED_MODULE_4__util_errorObject__["a" /* errorObject */].e.errors) : [__WEBPACK_IMPORTED_MODULE_4__util_errorObject__["a" /* errorObject */].e]); + } + } + if (__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__util_isArray__["a" /* isArray */])(_subscriptions)) { + index = -1; + len = _subscriptions.length; + while (++index < len) { + var sub = _subscriptions[index]; + if (__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__util_isObject__["a" /* isObject */])(sub)) { + var trial = __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_3__util_tryCatch__["a" /* tryCatch */])(sub.unsubscribe).call(sub); + if (trial === __WEBPACK_IMPORTED_MODULE_4__util_errorObject__["a" /* errorObject */]) { + hasErrors = true; + errors = errors || []; + var err = __WEBPACK_IMPORTED_MODULE_4__util_errorObject__["a" /* errorObject */].e; + if (err instanceof __WEBPACK_IMPORTED_MODULE_5__util_UnsubscriptionError__["a" /* UnsubscriptionError */]) { + errors = errors.concat(flattenUnsubscriptionErrors(err.errors)); + } + else { + errors.push(err); + } + } + } + } + } + if (hasErrors) { + throw new __WEBPACK_IMPORTED_MODULE_5__util_UnsubscriptionError__["a" /* UnsubscriptionError */](errors); + } + }; + Subscription.prototype.add = function (teardown) { + if (!teardown || (teardown === Subscription.EMPTY)) { + return Subscription.EMPTY; + } + if (teardown === this) { + return this; + } + var subscription = teardown; + switch (typeof teardown) { + case 'function': + subscription = new Subscription(teardown); + case 'object': + if (subscription.closed || typeof subscription.unsubscribe !== 'function') { + return subscription; + } + else if (this.closed) { + subscription.unsubscribe(); + return subscription; + } + else if (typeof subscription._addParent !== 'function') { + var tmp = subscription; + subscription = new Subscription(); + subscription._subscriptions = [tmp]; + } + break; + default: + throw new Error('unrecognized teardown ' + teardown + ' added to Subscription.'); + } + var subscriptions = this._subscriptions || (this._subscriptions = []); + subscriptions.push(subscription); + subscription._addParent(this); + return subscription; + }; + Subscription.prototype.remove = function (subscription) { + var subscriptions = this._subscriptions; + if (subscriptions) { + var subscriptionIndex = subscriptions.indexOf(subscription); + if (subscriptionIndex !== -1) { + subscriptions.splice(subscriptionIndex, 1); + } + } + }; + Subscription.prototype._addParent = function (parent) { + var _a = this, _parent = _a._parent, _parents = _a._parents; + if (!_parent || _parent === parent) { + this._parent = parent; + } + else if (!_parents) { + this._parents = [parent]; + } + else if (_parents.indexOf(parent) === -1) { + _parents.push(parent); + } + }; + Subscription.EMPTY = (function (empty) { + empty.closed = true; + return empty; + }(new Subscription())); + return Subscription; +}()); + +function flattenUnsubscriptionErrors(errors) { + return errors.reduce(function (errs, err) { return errs.concat((err instanceof __WEBPACK_IMPORTED_MODULE_5__util_UnsubscriptionError__["a" /* UnsubscriptionError */]) ? err.errors : err); }, []); +} +//# sourceMappingURL=Subscription.js.map + + +/***/ }), +/* 26 */ +/***/ (function(module, exports, __webpack_require__) { + +// Copyright 2015 Joyent, Inc. 
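+// sshpk utility helpers: buffer splitting, multi-precision integer and EC
+// point normalization, derived key parts (RSA CRT params, DSA/ED25519/X25519
+// public keys), and OpenSSL/OpenSSH cipher metadata.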
+ +module.exports = { + bufferSplit: bufferSplit, + addRSAMissing: addRSAMissing, + calculateDSAPublic: calculateDSAPublic, + calculateED25519Public: calculateED25519Public, + calculateX25519Public: calculateX25519Public, + mpNormalize: mpNormalize, + mpDenormalize: mpDenormalize, + ecNormalize: ecNormalize, + countZeros: countZeros, + assertCompatible: assertCompatible, + isCompatible: isCompatible, + opensslKeyDeriv: opensslKeyDeriv, + opensshCipherInfo: opensshCipherInfo, + publicFromPrivateECDSA: publicFromPrivateECDSA, + zeroPadToLength: zeroPadToLength, + writeBitString: writeBitString, + readBitString: readBitString +}; + +var assert = __webpack_require__(16); +var Buffer = __webpack_require__(15).Buffer; +var PrivateKey = __webpack_require__(33); +var Key = __webpack_require__(27); +var crypto = __webpack_require__(11); +var algs = __webpack_require__(32); +var asn1 = __webpack_require__(66); + +var ec, jsbn; +var nacl; + +var MAX_CLASS_DEPTH = 3; + +function isCompatible(obj, klass, needVer) { + if (obj === null || typeof (obj) !== 'object') + return (false); + if (needVer === undefined) + needVer = klass.prototype._sshpkApiVersion; + if (obj instanceof klass && + klass.prototype._sshpkApiVersion[0] == needVer[0]) + return (true); + var proto = Object.getPrototypeOf(obj); + var depth = 0; + while (proto.constructor.name !== klass.name) { + proto = Object.getPrototypeOf(proto); + if (!proto || ++depth > MAX_CLASS_DEPTH) + return (false); + } + if (proto.constructor.name !== klass.name) + return (false); + var ver = proto._sshpkApiVersion; + if (ver === undefined) + ver = klass._oldVersionDetect(obj); + if (ver[0] != needVer[0] || ver[1] < needVer[1]) + return (false); + return (true); +} + +function assertCompatible(obj, klass, needVer, name) { + if (name === undefined) + name = 'object'; + assert.ok(obj, name + ' must not be null'); + assert.object(obj, name + ' must be an object'); + if (needVer === undefined) + needVer = klass.prototype._sshpkApiVersion; + if (obj instanceof klass && + klass.prototype._sshpkApiVersion[0] == needVer[0]) + return; + var proto = Object.getPrototypeOf(obj); + var depth = 0; + while (proto.constructor.name !== klass.name) { + proto = Object.getPrototypeOf(proto); + assert.ok(proto && ++depth <= MAX_CLASS_DEPTH, + name + ' must be a ' + klass.name + ' instance'); + } + assert.strictEqual(proto.constructor.name, klass.name, + name + ' must be a ' + klass.name + ' instance'); + var ver = proto._sshpkApiVersion; + if (ver === undefined) + ver = klass._oldVersionDetect(obj); + assert.ok(ver[0] == needVer[0] && ver[1] >= needVer[1], + name + ' must be compatible with ' + klass.name + ' klass ' + + 'version ' + needVer[0] + '.' 
+ needVer[1]); +} + +var CIPHER_LEN = { + 'des-ede3-cbc': { key: 7, iv: 8 }, + 'aes-128-cbc': { key: 16, iv: 16 } +}; +var PKCS5_SALT_LEN = 8; + +function opensslKeyDeriv(cipher, salt, passphrase, count) { + assert.buffer(salt, 'salt'); + assert.buffer(passphrase, 'passphrase'); + assert.number(count, 'iteration count'); + + var clen = CIPHER_LEN[cipher]; + assert.object(clen, 'supported cipher'); + + salt = salt.slice(0, PKCS5_SALT_LEN); + + var D, D_prev, bufs; + var material = Buffer.alloc(0); + while (material.length < clen.key + clen.iv) { + bufs = []; + if (D_prev) + bufs.push(D_prev); + bufs.push(passphrase); + bufs.push(salt); + D = Buffer.concat(bufs); + for (var j = 0; j < count; ++j) + D = crypto.createHash('md5').update(D).digest(); + material = Buffer.concat([material, D]); + D_prev = D; + } + + return ({ + key: material.slice(0, clen.key), + iv: material.slice(clen.key, clen.key + clen.iv) + }); +} + +/* Count leading zero bits on a buffer */ +function countZeros(buf) { + var o = 0, obit = 8; + while (o < buf.length) { + var mask = (1 << obit); + if ((buf[o] & mask) === mask) + break; + obit--; + if (obit < 0) { + o++; + obit = 8; + } + } + return (o*8 + (8 - obit) - 1); +} + +function bufferSplit(buf, chr) { + assert.buffer(buf); + assert.string(chr); + + var parts = []; + var lastPart = 0; + var matches = 0; + for (var i = 0; i < buf.length; ++i) { + if (buf[i] === chr.charCodeAt(matches)) + ++matches; + else if (buf[i] === chr.charCodeAt(0)) + matches = 1; + else + matches = 0; + + if (matches >= chr.length) { + var newPart = i + 1; + parts.push(buf.slice(lastPart, newPart - matches)); + lastPart = newPart; + matches = 0; + } + } + if (lastPart <= buf.length) + parts.push(buf.slice(lastPart, buf.length)); + + return (parts); +} + +function ecNormalize(buf, addZero) { + assert.buffer(buf); + if (buf[0] === 0x00 && buf[1] === 0x04) { + if (addZero) + return (buf); + return (buf.slice(1)); + } else if (buf[0] === 0x04) { + if (!addZero) + return (buf); + } else { + while (buf[0] === 0x00) + buf = buf.slice(1); + if (buf[0] === 0x02 || buf[0] === 0x03) + throw (new Error('Compressed elliptic curve points ' + + 'are not supported')); + if (buf[0] !== 0x04) + throw (new Error('Not a valid elliptic curve point')); + if (!addZero) + return (buf); + } + var b = Buffer.alloc(buf.length + 1); + b[0] = 0x0; + buf.copy(b, 1); + return (b); +} + +function readBitString(der, tag) { + if (tag === undefined) + tag = asn1.Ber.BitString; + var buf = der.readString(tag, true); + assert.strictEqual(buf[0], 0x00, 'bit strings with unused bits are ' + + 'not supported (0x' + buf[0].toString(16) + ')'); + return (buf.slice(1)); +} + +function writeBitString(der, buf, tag) { + if (tag === undefined) + tag = asn1.Ber.BitString; + var b = Buffer.alloc(buf.length + 1); + b[0] = 0x00; + buf.copy(b, 1); + der.writeBuffer(b, tag); +} + +function mpNormalize(buf) { + assert.buffer(buf); + while (buf.length > 1 && buf[0] === 0x00 && (buf[1] & 0x80) === 0x00) + buf = buf.slice(1); + if ((buf[0] & 0x80) === 0x80) { + var b = Buffer.alloc(buf.length + 1); + b[0] = 0x00; + buf.copy(b, 1); + buf = b; + } + return (buf); +} + +function mpDenormalize(buf) { + assert.buffer(buf); + while (buf.length > 1 && buf[0] === 0x00) + buf = buf.slice(1); + return (buf); +} + +function zeroPadToLength(buf, len) { + assert.buffer(buf); + assert.number(len); + while (buf.length > len) { + assert.equal(buf[0], 0x00); + buf = buf.slice(1); + } + while (buf.length < len) { + var b = Buffer.alloc(buf.length + 1); + b[0] = 0x00; + 
buf.copy(b, 1); + buf = b; + } + return (buf); +} + +function bigintToMpBuf(bigint) { + var buf = Buffer.from(bigint.toByteArray()); + buf = mpNormalize(buf); + return (buf); +} + +function calculateDSAPublic(g, p, x) { + assert.buffer(g); + assert.buffer(p); + assert.buffer(x); + try { + var bigInt = __webpack_require__(81).BigInteger; + } catch (e) { + throw (new Error('To load a PKCS#8 format DSA private key, ' + + 'the node jsbn library is required.')); + } + g = new bigInt(g); + p = new bigInt(p); + x = new bigInt(x); + var y = g.modPow(x, p); + var ybuf = bigintToMpBuf(y); + return (ybuf); +} + +function calculateED25519Public(k) { + assert.buffer(k); + + if (nacl === undefined) + nacl = __webpack_require__(76); + + var kp = nacl.sign.keyPair.fromSeed(new Uint8Array(k)); + return (Buffer.from(kp.publicKey)); +} + +function calculateX25519Public(k) { + assert.buffer(k); + + if (nacl === undefined) + nacl = __webpack_require__(76); + + var kp = nacl.box.keyPair.fromSeed(new Uint8Array(k)); + return (Buffer.from(kp.publicKey)); +} + +function addRSAMissing(key) { + assert.object(key); + assertCompatible(key, PrivateKey, [1, 1]); + try { + var bigInt = __webpack_require__(81).BigInteger; + } catch (e) { + throw (new Error('To write a PEM private key from ' + + 'this source, the node jsbn lib is required.')); + } + + var d = new bigInt(key.part.d.data); + var buf; + + if (!key.part.dmodp) { + var p = new bigInt(key.part.p.data); + var dmodp = d.mod(p.subtract(1)); + + buf = bigintToMpBuf(dmodp); + key.part.dmodp = {name: 'dmodp', data: buf}; + key.parts.push(key.part.dmodp); + } + if (!key.part.dmodq) { + var q = new bigInt(key.part.q.data); + var dmodq = d.mod(q.subtract(1)); + + buf = bigintToMpBuf(dmodq); + key.part.dmodq = {name: 'dmodq', data: buf}; + key.parts.push(key.part.dmodq); + } +} + +function publicFromPrivateECDSA(curveName, priv) { + assert.string(curveName, 'curveName'); + assert.buffer(priv); + if (ec === undefined) + ec = __webpack_require__(139); + if (jsbn === undefined) + jsbn = __webpack_require__(81).BigInteger; + var params = algs.curves[curveName]; + var p = new jsbn(params.p); + var a = new jsbn(params.a); + var b = new jsbn(params.b); + var curve = new ec.ECCurveFp(p, a, b); + var G = curve.decodePointHex(params.G.toString('hex')); + + var d = new jsbn(mpNormalize(priv)); + var pub = G.multiply(d); + pub = Buffer.from(curve.encodePointHex(pub), 'hex'); + + var parts = []; + parts.push({name: 'curve', data: Buffer.from(curveName)}); + parts.push({name: 'Q', data: pub}); + + var key = new Key({type: 'ecdsa', curve: curve, parts: parts}); + return (key); +} + +function opensshCipherInfo(cipher) { + var inf = {}; + switch (cipher) { + case '3des-cbc': + inf.keySize = 24; + inf.blockSize = 8; + inf.opensslName = 'des-ede3-cbc'; + break; + case 'blowfish-cbc': + inf.keySize = 16; + inf.blockSize = 8; + inf.opensslName = 'bf-cbc'; + break; + case 'aes128-cbc': + case 'aes128-ctr': + case 'aes128-gcm@openssh.com': + inf.keySize = 16; + inf.blockSize = 16; + inf.opensslName = 'aes-128-' + cipher.slice(7, 10); + break; + case 'aes192-cbc': + case 'aes192-ctr': + case 'aes192-gcm@openssh.com': + inf.keySize = 24; + inf.blockSize = 16; + inf.opensslName = 'aes-192-' + cipher.slice(7, 10); + break; + case 'aes256-cbc': + case 'aes256-ctr': + case 'aes256-gcm@openssh.com': + inf.keySize = 32; + inf.blockSize = 16; + inf.opensslName = 'aes-256-' + cipher.slice(7, 10); + break; + default: + throw (new Error( + 'Unsupported openssl cipher "' + cipher + '"')); + } + return 
(inf); +} + + +/***/ }), +/* 27 */ +/***/ (function(module, exports, __webpack_require__) { + +// Copyright 2017 Joyent, Inc. + +module.exports = Key; + +var assert = __webpack_require__(16); +var algs = __webpack_require__(32); +var crypto = __webpack_require__(11); +var Fingerprint = __webpack_require__(156); +var Signature = __webpack_require__(75); +var DiffieHellman = __webpack_require__(325).DiffieHellman; +var errs = __webpack_require__(74); +var utils = __webpack_require__(26); +var PrivateKey = __webpack_require__(33); +var edCompat; + +try { + edCompat = __webpack_require__(454); +} catch (e) { + /* Just continue through, and bail out if we try to use it. */ +} + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var KeyParseError = errs.KeyParseError; + +var formats = {}; +formats['auto'] = __webpack_require__(455); +formats['pem'] = __webpack_require__(86); +formats['pkcs1'] = __webpack_require__(327); +formats['pkcs8'] = __webpack_require__(157); +formats['rfc4253'] = __webpack_require__(103); +formats['ssh'] = __webpack_require__(456); +formats['ssh-private'] = __webpack_require__(192); +formats['openssh'] = formats['ssh-private']; +formats['dnssec'] = __webpack_require__(326); + +function Key(opts) { + assert.object(opts, 'options'); + assert.arrayOfObject(opts.parts, 'options.parts'); + assert.string(opts.type, 'options.type'); + assert.optionalString(opts.comment, 'options.comment'); + + var algInfo = algs.info[opts.type]; + if (typeof (algInfo) !== 'object') + throw (new InvalidAlgorithmError(opts.type)); + + var partLookup = {}; + for (var i = 0; i < opts.parts.length; ++i) { + var part = opts.parts[i]; + partLookup[part.name] = part; + } + + this.type = opts.type; + this.parts = opts.parts; + this.part = partLookup; + this.comment = undefined; + this.source = opts.source; + + /* for speeding up hashing/fingerprint operations */ + this._rfc4253Cache = opts._rfc4253Cache; + this._hashCache = {}; + + var sz; + this.curve = undefined; + if (this.type === 'ecdsa') { + var curve = this.part.curve.data.toString(); + this.curve = curve; + sz = algs.curves[curve].size; + } else if (this.type === 'ed25519' || this.type === 'curve25519') { + sz = 256; + this.curve = 'curve25519'; + } else { + var szPart = this.part[algInfo.sizePart]; + sz = szPart.data.length; + sz = sz * 8 - utils.countZeros(szPart.data); + } + this.size = sz; +} + +Key.formats = formats; + +Key.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'ssh'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + if (format === 'rfc4253') { + if (this._rfc4253Cache === undefined) + this._rfc4253Cache = formats['rfc4253'].write(this); + return (this._rfc4253Cache); + } + + return (formats[format].write(this, options)); +}; + +Key.prototype.toString = function (format, options) { + return (this.toBuffer(format, options).toString()); +}; + +Key.prototype.hash = function (algo) { + assert.string(algo, 'algorithm'); + algo = algo.toLowerCase(); + if (algs.hashAlgs[algo] === undefined) + throw (new InvalidAlgorithmError(algo)); + + if (this._hashCache[algo]) + return (this._hashCache[algo]); + var hash = crypto.createHash(algo). 
+ update(this.toBuffer('rfc4253')).digest(); + this._hashCache[algo] = hash; + return (hash); +}; + +Key.prototype.fingerprint = function (algo) { + if (algo === undefined) + algo = 'sha256'; + assert.string(algo, 'algorithm'); + var opts = { + type: 'key', + hash: this.hash(algo), + algorithm: algo + }; + return (new Fingerprint(opts)); +}; + +Key.prototype.defaultHashAlgorithm = function () { + var hashAlgo = 'sha1'; + if (this.type === 'rsa') + hashAlgo = 'sha256'; + if (this.type === 'dsa' && this.size > 1024) + hashAlgo = 'sha256'; + if (this.type === 'ed25519') + hashAlgo = 'sha512'; + if (this.type === 'ecdsa') { + if (this.size <= 256) + hashAlgo = 'sha256'; + else if (this.size <= 384) + hashAlgo = 'sha384'; + else + hashAlgo = 'sha512'; + } + return (hashAlgo); +}; + +Key.prototype.createVerify = function (hashAlgo) { + if (hashAlgo === undefined) + hashAlgo = this.defaultHashAlgorithm(); + assert.string(hashAlgo, 'hash algorithm'); + + /* ED25519 is not supported by OpenSSL, use a javascript impl. */ + if (this.type === 'ed25519' && edCompat !== undefined) + return (new edCompat.Verifier(this, hashAlgo)); + if (this.type === 'curve25519') + throw (new Error('Curve25519 keys are not suitable for ' + + 'signing or verification')); + + var v, nm, err; + try { + nm = hashAlgo.toUpperCase(); + v = crypto.createVerify(nm); + } catch (e) { + err = e; + } + if (v === undefined || (err instanceof Error && + err.message.match(/Unknown message digest/))) { + nm = 'RSA-'; + nm += hashAlgo.toUpperCase(); + v = crypto.createVerify(nm); + } + assert.ok(v, 'failed to create verifier'); + var oldVerify = v.verify.bind(v); + var key = this.toBuffer('pkcs8'); + var curve = this.curve; + var self = this; + v.verify = function (signature, fmt) { + if (Signature.isSignature(signature, [2, 0])) { + if (signature.type !== self.type) + return (false); + if (signature.hashAlgorithm && + signature.hashAlgorithm !== hashAlgo) + return (false); + if (signature.curve && self.type === 'ecdsa' && + signature.curve !== curve) + return (false); + return (oldVerify(key, signature.toBuffer('asn1'))); + + } else if (typeof (signature) === 'string' || + Buffer.isBuffer(signature)) { + return (oldVerify(key, signature, fmt)); + + /* + * Avoid doing this on valid arguments, walking the prototype + * chain can be quite slow. 
+ */ + } else if (Signature.isSignature(signature, [1, 0])) { + throw (new Error('signature was created by too old ' + + 'a version of sshpk and cannot be verified')); + + } else { + throw (new TypeError('signature must be a string, ' + + 'Buffer, or Signature object')); + } + }; + return (v); +}; + +Key.prototype.createDiffieHellman = function () { + if (this.type === 'rsa') + throw (new Error('RSA keys do not support Diffie-Hellman')); + + return (new DiffieHellman(this)); +}; +Key.prototype.createDH = Key.prototype.createDiffieHellman; + +Key.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + if (k instanceof PrivateKey) + k = k.toPublic(); + if (!k.comment) + k.comment = options.filename; + return (k); + } catch (e) { + if (e.name === 'KeyEncryptedError') + throw (e); + throw (new KeyParseError(options.filename, format, e)); + } +}; + +Key.isKey = function (obj, ver) { + return (utils.isCompatible(obj, Key, ver)); +}; + +/* + * API versions for Key: + * [1,0] -- initial ver, may take Signature for createVerify or may not + * [1,1] -- added pkcs1, pkcs8 formats + * [1,2] -- added auto, ssh-private, openssh formats + * [1,3] -- added defaultHashAlgorithm + * [1,4] -- added ed support, createDH + * [1,5] -- first explicitly tagged version + * [1,6] -- changed ed25519 part names + */ +Key.prototype._sshpkApiVersion = [1, 6]; + +Key._oldVersionDetect = function (obj) { + assert.func(obj.toBuffer); + assert.func(obj.fingerprint); + if (obj.createDH) + return ([1, 4]); + if (obj.defaultHashAlgorithm) + return ([1, 3]); + if (obj.formats['auto']) + return ([1, 2]); + if (obj.formats['pkcs1']) + return ([1, 1]); + return ([1, 0]); +}; + + +/***/ }), +/* 28 */ +/***/ (function(module, exports) { + +module.exports = require("assert"); + +/***/ }), +/* 29 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = nullify; +function nullify(obj = {}) { + if (Array.isArray(obj)) { + for (var _iterator = obj, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? 
_iterator : _iterator[Symbol.iterator]();;) { + var _ref; + + if (_isArray) { + if (_i >= _iterator.length) break; + _ref = _iterator[_i++]; + } else { + _i = _iterator.next(); + if (_i.done) break; + _ref = _i.value; + } + + const item = _ref; + + nullify(item); + } + } else if (obj !== null && typeof obj === 'object' || typeof obj === 'function') { + Object.setPrototypeOf(obj, null); + + // for..in can only be applied to 'object', not 'function' + if (typeof obj === 'object') { + for (const key in obj) { + nullify(obj[key]); + } + } + } + + return obj; +} + +/***/ }), +/* 30 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + +const escapeStringRegexp = __webpack_require__(388); +const ansiStyles = __webpack_require__(506); +const stdoutColor = __webpack_require__(598).stdout; + +const template = __webpack_require__(599); + +const isSimpleWindowsTerm = process.platform === 'win32' && !(process.env.TERM || '').toLowerCase().startsWith('xterm'); + +// `supportsColor.level` → `ansiStyles.color[name]` mapping +const levelMapping = ['ansi', 'ansi', 'ansi256', 'ansi16m']; + +// `color-convert` models to exclude from the Chalk API due to conflicts and such +const skipModels = new Set(['gray']); + +const styles = Object.create(null); + +function applyOptions(obj, options) { + options = options || {}; + + // Detect level if not set manually + const scLevel = stdoutColor ? stdoutColor.level : 0; + obj.level = options.level === undefined ? scLevel : options.level; + obj.enabled = 'enabled' in options ? options.enabled : obj.level > 0; +} + +function Chalk(options) { + // We check for this.template here since calling `chalk.constructor()` + // by itself will have a `this` of a previously constructed chalk object + if (!this || !(this instanceof Chalk) || this.template) { + const chalk = {}; + applyOptions(chalk, options); + + chalk.template = function () { + const args = [].slice.call(arguments); + return chalkTag.apply(null, [chalk.template].concat(args)); + }; + + Object.setPrototypeOf(chalk, Chalk.prototype); + Object.setPrototypeOf(chalk.template, chalk); + + chalk.template.constructor = Chalk; + + return chalk.template; + } + + applyOptions(this, options); +} + +// Use bright blue on Windows as the normal blue color is illegible +if (isSimpleWindowsTerm) { + ansiStyles.blue.open = '\u001B[94m'; +} + +for (const key of Object.keys(ansiStyles)) { + ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + + styles[key] = { + get() { + const codes = ansiStyles[key]; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, key); + } + }; +} + +styles.visible = { + get() { + return build.call(this, this._styles || [], true, 'visible'); + } +}; + +ansiStyles.color.closeRe = new RegExp(escapeStringRegexp(ansiStyles.color.close), 'g'); +for (const model of Object.keys(ansiStyles.color.ansi)) { + if (skipModels.has(model)) { + continue; + } + + styles[model] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.color[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.color.close, + closeRe: ansiStyles.color.closeRe + }; + return build.call(this, this._styles ? 
this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +ansiStyles.bgColor.closeRe = new RegExp(escapeStringRegexp(ansiStyles.bgColor.close), 'g'); +for (const model of Object.keys(ansiStyles.bgColor.ansi)) { + if (skipModels.has(model)) { + continue; + } + + const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); + styles[bgModel] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.bgColor[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.bgColor.close, + closeRe: ansiStyles.bgColor.closeRe + }; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +const proto = Object.defineProperties(() => {}, styles); + +function build(_styles, _empty, key) { + const builder = function () { + return applyStyle.apply(builder, arguments); + }; + + builder._styles = _styles; + builder._empty = _empty; + + const self = this; + + Object.defineProperty(builder, 'level', { + enumerable: true, + get() { + return self.level; + }, + set(level) { + self.level = level; + } + }); + + Object.defineProperty(builder, 'enabled', { + enumerable: true, + get() { + return self.enabled; + }, + set(enabled) { + self.enabled = enabled; + } + }); + + // See below for fix regarding invisible grey/dim combination on Windows + builder.hasGrey = this.hasGrey || key === 'gray' || key === 'grey'; + + // `__proto__` is used because we must return a function, but there is + // no way to create a function with a different prototype + builder.__proto__ = proto; // eslint-disable-line no-proto + + return builder; +} + +function applyStyle() { + // Support varags, but simply cast to string in case there's only one arg + const args = arguments; + const argsLen = args.length; + let str = String(arguments[0]); + + if (argsLen === 0) { + return ''; + } + + if (argsLen > 1) { + // Don't slice `arguments`, it prevents V8 optimizations + for (let a = 1; a < argsLen; a++) { + str += ' ' + args[a]; + } + } + + if (!this.enabled || this.level <= 0 || !str) { + return this._empty ? '' : str; + } + + // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe, + // see https://github.com/chalk/chalk/issues/58 + // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop. + const originalDim = ansiStyles.dim.open; + if (isSimpleWindowsTerm && this.hasGrey) { + ansiStyles.dim.open = ''; + } + + for (const code of this._styles.slice().reverse()) { + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + str = code.open + str.replace(code.closeRe, code.open) + code.close; + + // Close the styling before a linebreak and reopen + // after next line to fix a bleed issue on macOS + // https://github.com/chalk/chalk/pull/92 + str = str.replace(/\r?\n/g, `${code.close}$&${code.open}`); + } + + // Reset the original `dim` if we changed it to work around the Windows dimmed gray issue + ansiStyles.dim.open = originalDim; + + return str; +} + +function chalkTag(chalk, strings) { + if (!Array.isArray(strings)) { + // If chalk() was called by itself or with a string, + // return the string itself as a string. 
+ return [].slice.call(arguments, 1).join(' '); + } + + const args = [].slice.call(arguments, 2); + const parts = [strings.raw[0]]; + + for (let i = 1; i < strings.length; i++) { + parts.push(String(args[i - 1]).replace(/[{}\\]/g, '\\$&')); + parts.push(String(strings.raw[i])); + } + + return template(chalk, parts.join('')); +} + +Object.defineProperties(Chalk.prototype, styles); + +module.exports = Chalk(); // eslint-disable-line new-cap +module.exports.supportsColor = stdoutColor; +module.exports.default = module.exports; // For TypeScript + + +/***/ }), +/* 31 */ +/***/ (function(module, exports) { + +var core = module.exports = { version: '2.5.7' }; +if (typeof __e == 'number') __e = core; // eslint-disable-line no-undef + + +/***/ }), +/* 32 */ +/***/ (function(module, exports, __webpack_require__) { + +// Copyright 2015 Joyent, Inc. + +var Buffer = __webpack_require__(15).Buffer; + +var algInfo = { + 'dsa': { + parts: ['p', 'q', 'g', 'y'], + sizePart: 'p' + }, + 'rsa': { + parts: ['e', 'n'], + sizePart: 'n' + }, + 'ecdsa': { + parts: ['curve', 'Q'], + sizePart: 'Q' + }, + 'ed25519': { + parts: ['A'], + sizePart: 'A' + } +}; +algInfo['curve25519'] = algInfo['ed25519']; + +var algPrivInfo = { + 'dsa': { + parts: ['p', 'q', 'g', 'y', 'x'] + }, + 'rsa': { + parts: ['n', 'e', 'd', 'iqmp', 'p', 'q'] + }, + 'ecdsa': { + parts: ['curve', 'Q', 'd'] + }, + 'ed25519': { + parts: ['A', 'k'] + } +}; +algPrivInfo['curve25519'] = algPrivInfo['ed25519']; + +var hashAlgs = { + 'md5': true, + 'sha1': true, + 'sha256': true, + 'sha384': true, + 'sha512': true +}; + +/* + * Taken from + * http://csrc.nist.gov/groups/ST/toolkit/documents/dss/NISTReCur.pdf + */ +var curves = { + 'nistp256': { + size: 256, + pkcs8oid: '1.2.840.10045.3.1.7', + p: Buffer.from(('00' + + 'ffffffff 00000001 00000000 00000000' + + '00000000 ffffffff ffffffff ffffffff'). + replace(/ /g, ''), 'hex'), + a: Buffer.from(('00' + + 'FFFFFFFF 00000001 00000000 00000000' + + '00000000 FFFFFFFF FFFFFFFF FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: Buffer.from(( + '5ac635d8 aa3a93e7 b3ebbd55 769886bc' + + '651d06b0 cc53b0f6 3bce3c3e 27d2604b'). + replace(/ /g, ''), 'hex'), + s: Buffer.from(('00' + + 'c49d3608 86e70493 6a6678e1 139d26b7' + + '819f7e90'). + replace(/ /g, ''), 'hex'), + n: Buffer.from(('00' + + 'ffffffff 00000000 ffffffff ffffffff' + + 'bce6faad a7179e84 f3b9cac2 fc632551'). + replace(/ /g, ''), 'hex'), + G: Buffer.from(('04' + + '6b17d1f2 e12c4247 f8bce6e5 63a440f2' + + '77037d81 2deb33a0 f4a13945 d898c296' + + '4fe342e2 fe1a7f9b 8ee7eb4a 7c0f9e16' + + '2bce3357 6b315ece cbb64068 37bf51f5'). + replace(/ /g, ''), 'hex') + }, + 'nistp384': { + size: 384, + pkcs8oid: '1.3.132.0.34', + p: Buffer.from(('00' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff fffffffe' + + 'ffffffff 00000000 00000000 ffffffff'). + replace(/ /g, ''), 'hex'), + a: Buffer.from(('00' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFE' + + 'FFFFFFFF 00000000 00000000 FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: Buffer.from(( + 'b3312fa7 e23ee7e4 988e056b e3f82d19' + + '181d9c6e fe814112 0314088f 5013875a' + + 'c656398d 8a2ed19d 2a85c8ed d3ec2aef'). + replace(/ /g, ''), 'hex'), + s: Buffer.from(('00' + + 'a335926a a319a27a 1d00896a 6773a482' + + '7acdac73'). + replace(/ /g, ''), 'hex'), + n: Buffer.from(('00' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff c7634d81 f4372ddf' + + '581a0db2 48b0a77a ecec196a ccc52973'). 
+ replace(/ /g, ''), 'hex'), + G: Buffer.from(('04' + + 'aa87ca22 be8b0537 8eb1c71e f320ad74' + + '6e1d3b62 8ba79b98 59f741e0 82542a38' + + '5502f25d bf55296c 3a545e38 72760ab7' + + '3617de4a 96262c6f 5d9e98bf 9292dc29' + + 'f8f41dbd 289a147c e9da3113 b5f0b8c0' + + '0a60b1ce 1d7e819d 7a431d7c 90ea0e5f'). + replace(/ /g, ''), 'hex') + }, + 'nistp521': { + size: 521, + pkcs8oid: '1.3.132.0.35', + p: Buffer.from(( + '01ffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffff').replace(/ /g, ''), 'hex'), + a: Buffer.from(('01FF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: Buffer.from(('51' + + '953eb961 8e1c9a1f 929a21a0 b68540ee' + + 'a2da725b 99b315f3 b8b48991 8ef109e1' + + '56193951 ec7e937b 1652c0bd 3bb1bf07' + + '3573df88 3d2c34f1 ef451fd4 6b503f00'). + replace(/ /g, ''), 'hex'), + s: Buffer.from(('00' + + 'd09e8800 291cb853 96cc6717 393284aa' + + 'a0da64ba').replace(/ /g, ''), 'hex'), + n: Buffer.from(('01ff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff fffffffa' + + '51868783 bf2f966b 7fcc0148 f709a5d0' + + '3bb5c9b8 899c47ae bb6fb71e 91386409'). + replace(/ /g, ''), 'hex'), + G: Buffer.from(('04' + + '00c6 858e06b7 0404e9cd 9e3ecb66 2395b442' + + '9c648139 053fb521 f828af60 6b4d3dba' + + 'a14b5e77 efe75928 fe1dc127 a2ffa8de' + + '3348b3c1 856a429b f97e7e31 c2e5bd66' + + '0118 39296a78 9a3bc004 5c8a5fb4 2c7d1bd9' + + '98f54449 579b4468 17afbd17 273e662c' + + '97ee7299 5ef42640 c550b901 3fad0761' + + '353c7086 a272c240 88be9476 9fd16650'). + replace(/ /g, ''), 'hex') + } +}; + +module.exports = { + info: algInfo, + privInfo: algPrivInfo, + hashAlgs: hashAlgs, + curves: curves +}; + + +/***/ }), +/* 33 */ +/***/ (function(module, exports, __webpack_require__) { + +// Copyright 2017 Joyent, Inc. + +module.exports = PrivateKey; + +var assert = __webpack_require__(16); +var Buffer = __webpack_require__(15).Buffer; +var algs = __webpack_require__(32); +var crypto = __webpack_require__(11); +var Fingerprint = __webpack_require__(156); +var Signature = __webpack_require__(75); +var errs = __webpack_require__(74); +var util = __webpack_require__(3); +var utils = __webpack_require__(26); +var dhe = __webpack_require__(325); +var generateECDSA = dhe.generateECDSA; +var generateED25519 = dhe.generateED25519; +var edCompat; +var nacl; + +try { + edCompat = __webpack_require__(454); +} catch (e) { + /* Just continue through, and bail out if we try to use it. 
*/ +} + +var Key = __webpack_require__(27); + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var KeyParseError = errs.KeyParseError; +var KeyEncryptedError = errs.KeyEncryptedError; + +var formats = {}; +formats['auto'] = __webpack_require__(455); +formats['pem'] = __webpack_require__(86); +formats['pkcs1'] = __webpack_require__(327); +formats['pkcs8'] = __webpack_require__(157); +formats['rfc4253'] = __webpack_require__(103); +formats['ssh-private'] = __webpack_require__(192); +formats['openssh'] = formats['ssh-private']; +formats['ssh'] = formats['ssh-private']; +formats['dnssec'] = __webpack_require__(326); + +function PrivateKey(opts) { + assert.object(opts, 'options'); + Key.call(this, opts); + + this._pubCache = undefined; +} +util.inherits(PrivateKey, Key); + +PrivateKey.formats = formats; + +PrivateKey.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'pkcs1'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + return (formats[format].write(this, options)); +}; + +PrivateKey.prototype.hash = function (algo) { + return (this.toPublic().hash(algo)); +}; + +PrivateKey.prototype.toPublic = function () { + if (this._pubCache) + return (this._pubCache); + + var algInfo = algs.info[this.type]; + var pubParts = []; + for (var i = 0; i < algInfo.parts.length; ++i) { + var p = algInfo.parts[i]; + pubParts.push(this.part[p]); + } + + this._pubCache = new Key({ + type: this.type, + source: this, + parts: pubParts + }); + if (this.comment) + this._pubCache.comment = this.comment; + return (this._pubCache); +}; + +PrivateKey.prototype.derive = function (newType) { + assert.string(newType, 'type'); + var priv, pub, pair; + + if (this.type === 'ed25519' && newType === 'curve25519') { + if (nacl === undefined) + nacl = __webpack_require__(76); + + priv = this.part.k.data; + if (priv[0] === 0x00) + priv = priv.slice(1); + + pair = nacl.box.keyPair.fromSecretKey(new Uint8Array(priv)); + pub = Buffer.from(pair.publicKey); + + return (new PrivateKey({ + type: 'curve25519', + parts: [ + { name: 'A', data: utils.mpNormalize(pub) }, + { name: 'k', data: utils.mpNormalize(priv) } + ] + })); + } else if (this.type === 'curve25519' && newType === 'ed25519') { + if (nacl === undefined) + nacl = __webpack_require__(76); + + priv = this.part.k.data; + if (priv[0] === 0x00) + priv = priv.slice(1); + + pair = nacl.sign.keyPair.fromSeed(new Uint8Array(priv)); + pub = Buffer.from(pair.publicKey); + + return (new PrivateKey({ + type: 'ed25519', + parts: [ + { name: 'A', data: utils.mpNormalize(pub) }, + { name: 'k', data: utils.mpNormalize(priv) } + ] + })); + } + throw (new Error('Key derivation not supported from ' + this.type + + ' to ' + newType)); +}; + +PrivateKey.prototype.createVerify = function (hashAlgo) { + return (this.toPublic().createVerify(hashAlgo)); +}; + +PrivateKey.prototype.createSign = function (hashAlgo) { + if (hashAlgo === undefined) + hashAlgo = this.defaultHashAlgorithm(); + assert.string(hashAlgo, 'hash algorithm'); + + /* ED25519 is not supported by OpenSSL, use a javascript impl. 
*/ + if (this.type === 'ed25519' && edCompat !== undefined) + return (new edCompat.Signer(this, hashAlgo)); + if (this.type === 'curve25519') + throw (new Error('Curve25519 keys are not suitable for ' + + 'signing or verification')); + + var v, nm, err; + try { + nm = hashAlgo.toUpperCase(); + v = crypto.createSign(nm); + } catch (e) { + err = e; + } + if (v === undefined || (err instanceof Error && + err.message.match(/Unknown message digest/))) { + nm = 'RSA-'; + nm += hashAlgo.toUpperCase(); + v = crypto.createSign(nm); + } + assert.ok(v, 'failed to create verifier'); + var oldSign = v.sign.bind(v); + var key = this.toBuffer('pkcs1'); + var type = this.type; + var curve = this.curve; + v.sign = function () { + var sig = oldSign(key); + if (typeof (sig) === 'string') + sig = Buffer.from(sig, 'binary'); + sig = Signature.parse(sig, type, 'asn1'); + sig.hashAlgorithm = hashAlgo; + sig.curve = curve; + return (sig); + }; + return (v); +}; + +PrivateKey.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + assert.ok(k instanceof PrivateKey, 'key is not a private key'); + if (!k.comment) + k.comment = options.filename; + return (k); + } catch (e) { + if (e.name === 'KeyEncryptedError') + throw (e); + throw (new KeyParseError(options.filename, format, e)); + } +}; + +PrivateKey.isPrivateKey = function (obj, ver) { + return (utils.isCompatible(obj, PrivateKey, ver)); +}; + +PrivateKey.generate = function (type, options) { + if (options === undefined) + options = {}; + assert.object(options, 'options'); + + switch (type) { + case 'ecdsa': + if (options.curve === undefined) + options.curve = 'nistp256'; + assert.string(options.curve, 'options.curve'); + return (generateECDSA(options.curve)); + case 'ed25519': + return (generateED25519()); + default: + throw (new Error('Key generation not supported with key ' + + 'type "' + type + '"')); + } +}; + +/* + * API versions for PrivateKey: + * [1,0] -- initial ver + * [1,1] -- added auto, pkcs[18], openssh/ssh-private formats + * [1,2] -- added defaultHashAlgorithm + * [1,3] -- added derive, ed, createDH + * [1,4] -- first tagged version + * [1,5] -- changed ed25519 part names and format + */ +PrivateKey.prototype._sshpkApiVersion = [1, 5]; + +PrivateKey._oldVersionDetect = function (obj) { + assert.func(obj.toPublic); + assert.func(obj.createSign); + if (obj.derive) + return ([1, 3]); + if (obj.defaultHashAlgorithm) + return ([1, 2]); + if (obj.formats['auto']) + return ([1, 1]); + return ([1, 0]); +}; + + +/***/ }), +/* 34 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.wrapLifecycle = exports.run = exports.install = exports.Install = undefined; + +var _extends2; + +function _load_extends() { + return _extends2 = _interopRequireDefault(__webpack_require__(21)); +} + +var _asyncToGenerator2; + +function _load_asyncToGenerator() { + return _asyncToGenerator2 = _interopRequireDefault(__webpack_require__(2)); +} 
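+// The exports that follow (install, run, wrapLifecycle) are Babel-compiled async
+// functions: each one wraps a function* generator with the async-to-generator helper
+// declared above, and their dependencies are pulled in lazily through the memoizing
+// _load_* accessors so each webpack module is only evaluated on first use.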
+ +let install = exports.install = (() => { + var _ref29 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (config, reporter, flags, lockfile) { + yield wrapLifecycle(config, flags, (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const install = new Install(flags, config, reporter, lockfile); + yield install.init(); + })); + }); + + return function install(_x7, _x8, _x9, _x10) { + return _ref29.apply(this, arguments); + }; +})(); + +let run = exports.run = (() => { + var _ref31 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (config, reporter, flags, args) { + let lockfile; + let error = 'installCommandRenamed'; + if (flags.lockfile === false) { + lockfile = new (_lockfile || _load_lockfile()).default(); + } else { + lockfile = yield (_lockfile || _load_lockfile()).default.fromDirectory(config.lockfileFolder, reporter); + } + + if (args.length) { + const exampleArgs = args.slice(); + + if (flags.saveDev) { + exampleArgs.push('--dev'); + } + if (flags.savePeer) { + exampleArgs.push('--peer'); + } + if (flags.saveOptional) { + exampleArgs.push('--optional'); + } + if (flags.saveExact) { + exampleArgs.push('--exact'); + } + if (flags.saveTilde) { + exampleArgs.push('--tilde'); + } + let command = 'add'; + if (flags.global) { + error = 'globalFlagRemoved'; + command = 'global add'; + } + throw new (_errors || _load_errors()).MessageError(reporter.lang(error, `yarn ${command} ${exampleArgs.join(' ')}`)); + } + + yield install(config, reporter, flags, lockfile); + }); + + return function run(_x11, _x12, _x13, _x14) { + return _ref31.apply(this, arguments); + }; +})(); + +let wrapLifecycle = exports.wrapLifecycle = (() => { + var _ref32 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (config, flags, factory) { + yield config.executeLifecycleScript('preinstall'); + + yield factory(); + + // npm behaviour, seems kinda funky but yay compatibility + yield config.executeLifecycleScript('install'); + yield config.executeLifecycleScript('postinstall'); + + if (!config.production) { + if (!config.disablePrepublish) { + yield config.executeLifecycleScript('prepublish'); + } + yield config.executeLifecycleScript('prepare'); + } + }); + + return function wrapLifecycle(_x15, _x16, _x17) { + return _ref32.apply(this, arguments); + }; +})(); + +exports.hasWrapper = hasWrapper; +exports.setFlags = setFlags; + +var _objectPath; + +function _load_objectPath() { + return _objectPath = _interopRequireDefault(__webpack_require__(304)); +} + +var _hooks; + +function _load_hooks() { + return _hooks = __webpack_require__(374); +} + +var _index; + +function _load_index() { + return _index = _interopRequireDefault(__webpack_require__(220)); +} + +var _errors; + +function _load_errors() { + return _errors = __webpack_require__(6); +} + +var _integrityChecker; + +function _load_integrityChecker() { + return _integrityChecker = _interopRequireDefault(__webpack_require__(208)); +} + +var _lockfile; + +function _load_lockfile() { + return _lockfile = _interopRequireDefault(__webpack_require__(19)); +} + +var _lockfile2; + +function _load_lockfile2() { + return _lockfile2 = __webpack_require__(19); +} + +var _packageFetcher; + +function _load_packageFetcher() { + return _packageFetcher = _interopRequireWildcard(__webpack_require__(210)); +} + +var _packageInstallScripts; + +function _load_packageInstallScripts() { + return _packageInstallScripts = _interopRequireDefault(__webpack_require__(557)); +} + +var 
_packageCompatibility; + +function _load_packageCompatibility() { + return _packageCompatibility = _interopRequireWildcard(__webpack_require__(209)); +} + +var _packageResolver; + +function _load_packageResolver() { + return _packageResolver = _interopRequireDefault(__webpack_require__(366)); +} + +var _packageLinker; + +function _load_packageLinker() { + return _packageLinker = _interopRequireDefault(__webpack_require__(211)); +} + +var _index2; + +function _load_index2() { + return _index2 = __webpack_require__(57); +} + +var _index3; + +function _load_index3() { + return _index3 = __webpack_require__(78); +} + +var _autoclean; + +function _load_autoclean() { + return _autoclean = __webpack_require__(354); +} + +var _constants; + +function _load_constants() { + return _constants = _interopRequireWildcard(__webpack_require__(8)); +} + +var _normalizePattern; + +function _load_normalizePattern() { + return _normalizePattern = __webpack_require__(37); +} + +var _fs; + +function _load_fs() { + return _fs = _interopRequireWildcard(__webpack_require__(4)); +} + +var _map; + +function _load_map() { + return _map = _interopRequireDefault(__webpack_require__(29)); +} + +var _yarnVersion; + +function _load_yarnVersion() { + return _yarnVersion = __webpack_require__(120); +} + +var _generatePnpMap; + +function _load_generatePnpMap() { + return _generatePnpMap = __webpack_require__(579); +} + +var _workspaceLayout; + +function _load_workspaceLayout() { + return _workspaceLayout = _interopRequireDefault(__webpack_require__(90)); +} + +var _resolutionMap; + +function _load_resolutionMap() { + return _resolutionMap = _interopRequireDefault(__webpack_require__(214)); +} + +var _guessName; + +function _load_guessName() { + return _guessName = _interopRequireDefault(__webpack_require__(169)); +} + +var _audit; + +function _load_audit() { + return _audit = _interopRequireDefault(__webpack_require__(353)); +} + +function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const deepEqual = __webpack_require__(631); + +const emoji = __webpack_require__(302); +const invariant = __webpack_require__(9); +const path = __webpack_require__(0); +const semver = __webpack_require__(22); +const uuid = __webpack_require__(119); +const ssri = __webpack_require__(65); + +const ONE_DAY = 1000 * 60 * 60 * 24; + +/** + * Try and detect the installation method for Yarn and provide a command to update it with. 
+ */ + +function getUpdateCommand(installationMethod) { + if (installationMethod === 'tar') { + return `curl --compressed -o- -L ${(_constants || _load_constants()).YARN_INSTALLER_SH} | bash`; + } + + if (installationMethod === 'homebrew') { + return 'brew upgrade yarn'; + } + + if (installationMethod === 'deb') { + return 'sudo apt-get update && sudo apt-get install yarn'; + } + + if (installationMethod === 'rpm') { + return 'sudo yum install yarn'; + } + + if (installationMethod === 'npm') { + return 'npm install --global yarn'; + } + + if (installationMethod === 'chocolatey') { + return 'choco upgrade yarn'; + } + + if (installationMethod === 'apk') { + return 'apk update && apk add -u yarn'; + } + + if (installationMethod === 'portage') { + return 'sudo emerge --sync && sudo emerge -au sys-apps/yarn'; + } + + return null; +} + +function getUpdateInstaller(installationMethod) { + // Windows + if (installationMethod === 'msi') { + return (_constants || _load_constants()).YARN_INSTALLER_MSI; + } + + return null; +} + +function normalizeFlags(config, rawFlags) { + const flags = { + // install + har: !!rawFlags.har, + ignorePlatform: !!rawFlags.ignorePlatform, + ignoreEngines: !!rawFlags.ignoreEngines, + ignoreScripts: !!rawFlags.ignoreScripts, + ignoreOptional: !!rawFlags.ignoreOptional, + force: !!rawFlags.force, + flat: !!rawFlags.flat, + lockfile: rawFlags.lockfile !== false, + pureLockfile: !!rawFlags.pureLockfile, + updateChecksums: !!rawFlags.updateChecksums, + skipIntegrityCheck: !!rawFlags.skipIntegrityCheck, + frozenLockfile: !!rawFlags.frozenLockfile, + linkDuplicates: !!rawFlags.linkDuplicates, + checkFiles: !!rawFlags.checkFiles, + audit: !!rawFlags.audit, + + // add + peer: !!rawFlags.peer, + dev: !!rawFlags.dev, + optional: !!rawFlags.optional, + exact: !!rawFlags.exact, + tilde: !!rawFlags.tilde, + ignoreWorkspaceRootCheck: !!rawFlags.ignoreWorkspaceRootCheck, + + // outdated, update-interactive + includeWorkspaceDeps: !!rawFlags.includeWorkspaceDeps, + + // add, remove, update + workspaceRootIsCwd: rawFlags.workspaceRootIsCwd !== false + }; + + if (config.getOption('ignore-scripts')) { + flags.ignoreScripts = true; + } + + if (config.getOption('ignore-platform')) { + flags.ignorePlatform = true; + } + + if (config.getOption('ignore-engines')) { + flags.ignoreEngines = true; + } + + if (config.getOption('ignore-optional')) { + flags.ignoreOptional = true; + } + + if (config.getOption('force')) { + flags.force = true; + } + + return flags; +} + +class Install { + constructor(flags, config, reporter, lockfile) { + this.rootManifestRegistries = []; + this.rootPatternsToOrigin = (0, (_map || _load_map()).default)(); + this.lockfile = lockfile; + this.reporter = reporter; + this.config = config; + this.flags = normalizeFlags(config, flags); + this.resolutions = (0, (_map || _load_map()).default)(); // Legacy resolutions field used for flat install mode + this.resolutionMap = new (_resolutionMap || _load_resolutionMap()).default(config); // Selective resolutions for nested dependencies + this.resolver = new (_packageResolver || _load_packageResolver()).default(config, lockfile, this.resolutionMap); + this.integrityChecker = new (_integrityChecker || _load_integrityChecker()).default(config); + this.linker = new (_packageLinker || _load_packageLinker()).default(config, this.resolver); + this.scripts = new (_packageInstallScripts || _load_packageInstallScripts()).default(config, this.resolver, this.flags.force); + } + + /** + * Create a list of dependency requests from the current 
directories manifests. + */ + + fetchRequestFromCwd(excludePatterns = [], ignoreUnusedPatterns = false) { + var _this = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const patterns = []; + const deps = []; + let resolutionDeps = []; + const manifest = {}; + + const ignorePatterns = []; + const usedPatterns = []; + let workspaceLayout; + + // some commands should always run in the context of the entire workspace + const cwd = _this.flags.includeWorkspaceDeps || _this.flags.workspaceRootIsCwd ? _this.config.lockfileFolder : _this.config.cwd; + + // non-workspaces are always root, otherwise check for workspace root + const cwdIsRoot = !_this.config.workspaceRootFolder || _this.config.lockfileFolder === cwd; + + // exclude package names that are in install args + const excludeNames = []; + for (var _iterator = excludePatterns, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? _iterator : _iterator[Symbol.iterator]();;) { + var _ref; + + if (_isArray) { + if (_i >= _iterator.length) break; + _ref = _iterator[_i++]; + } else { + _i = _iterator.next(); + if (_i.done) break; + _ref = _i.value; + } + + const pattern = _ref; + + if ((0, (_index3 || _load_index3()).getExoticResolver)(pattern)) { + excludeNames.push((0, (_guessName || _load_guessName()).default)(pattern)); + } else { + // extract the name + const parts = (0, (_normalizePattern || _load_normalizePattern()).normalizePattern)(pattern); + excludeNames.push(parts.name); + } + } + + const stripExcluded = function stripExcluded(manifest) { + for (var _iterator2 = excludeNames, _isArray2 = Array.isArray(_iterator2), _i2 = 0, _iterator2 = _isArray2 ? _iterator2 : _iterator2[Symbol.iterator]();;) { + var _ref2; + + if (_isArray2) { + if (_i2 >= _iterator2.length) break; + _ref2 = _iterator2[_i2++]; + } else { + _i2 = _iterator2.next(); + if (_i2.done) break; + _ref2 = _i2.value; + } + + const exclude = _ref2; + + if (manifest.dependencies && manifest.dependencies[exclude]) { + delete manifest.dependencies[exclude]; + } + if (manifest.devDependencies && manifest.devDependencies[exclude]) { + delete manifest.devDependencies[exclude]; + } + if (manifest.optionalDependencies && manifest.optionalDependencies[exclude]) { + delete manifest.optionalDependencies[exclude]; + } + } + }; + + for (var _iterator3 = Object.keys((_index2 || _load_index2()).registries), _isArray3 = Array.isArray(_iterator3), _i3 = 0, _iterator3 = _isArray3 ? _iterator3 : _iterator3[Symbol.iterator]();;) { + var _ref3; + + if (_isArray3) { + if (_i3 >= _iterator3.length) break; + _ref3 = _iterator3[_i3++]; + } else { + _i3 = _iterator3.next(); + if (_i3.done) break; + _ref3 = _i3.value; + } + + const registry = _ref3; + + const filename = (_index2 || _load_index2()).registries[registry].filename; + + const loc = path.join(cwd, filename); + if (!(yield (_fs || _load_fs()).exists(loc))) { + continue; + } + + _this.rootManifestRegistries.push(registry); + + const projectManifestJson = yield _this.config.readJson(loc); + yield (0, (_index || _load_index()).default)(projectManifestJson, cwd, _this.config, cwdIsRoot); + + Object.assign(_this.resolutions, projectManifestJson.resolutions); + Object.assign(manifest, projectManifestJson); + + _this.resolutionMap.init(_this.resolutions); + for (var _iterator4 = Object.keys(_this.resolutionMap.resolutionsByPackage), _isArray4 = Array.isArray(_iterator4), _i4 = 0, _iterator4 = _isArray4 ? 
_iterator4 : _iterator4[Symbol.iterator]();;) { + var _ref4; + + if (_isArray4) { + if (_i4 >= _iterator4.length) break; + _ref4 = _iterator4[_i4++]; + } else { + _i4 = _iterator4.next(); + if (_i4.done) break; + _ref4 = _i4.value; + } + + const packageName = _ref4; + + const optional = (_objectPath || _load_objectPath()).default.has(manifest.optionalDependencies, packageName) && _this.flags.ignoreOptional; + for (var _iterator8 = _this.resolutionMap.resolutionsByPackage[packageName], _isArray8 = Array.isArray(_iterator8), _i8 = 0, _iterator8 = _isArray8 ? _iterator8 : _iterator8[Symbol.iterator]();;) { + var _ref9; + + if (_isArray8) { + if (_i8 >= _iterator8.length) break; + _ref9 = _iterator8[_i8++]; + } else { + _i8 = _iterator8.next(); + if (_i8.done) break; + _ref9 = _i8.value; + } + + const _ref8 = _ref9; + const pattern = _ref8.pattern; + + resolutionDeps = [...resolutionDeps, { registry, pattern, optional, hint: 'resolution' }]; + } + } + + const pushDeps = function pushDeps(depType, manifest, { hint, optional }, isUsed) { + if (ignoreUnusedPatterns && !isUsed) { + return; + } + // We only take unused dependencies into consideration to get deterministic hoisting. + // Since flat mode doesn't care about hoisting and everything is top level and specified then we can safely + // leave these out. + if (_this.flags.flat && !isUsed) { + return; + } + const depMap = manifest[depType]; + for (const name in depMap) { + if (excludeNames.indexOf(name) >= 0) { + continue; + } + + let pattern = name; + if (!_this.lockfile.getLocked(pattern)) { + // when we use --save we save the dependency to the lockfile with just the name rather than the + // version combo + pattern += '@' + depMap[name]; + } + + // normalization made sure packages are mentioned only once + if (isUsed) { + usedPatterns.push(pattern); + } else { + ignorePatterns.push(pattern); + } + + _this.rootPatternsToOrigin[pattern] = depType; + patterns.push(pattern); + deps.push({ pattern, registry, hint, optional, workspaceName: manifest.name, workspaceLoc: manifest._loc }); + } + }; + + if (cwdIsRoot) { + pushDeps('dependencies', projectManifestJson, { hint: null, optional: false }, true); + pushDeps('devDependencies', projectManifestJson, { hint: 'dev', optional: false }, !_this.config.production); + pushDeps('optionalDependencies', projectManifestJson, { hint: 'optional', optional: true }, true); + } + + if (_this.config.workspaceRootFolder) { + const workspaceLoc = cwdIsRoot ? loc : path.join(_this.config.lockfileFolder, filename); + const workspacesRoot = path.dirname(workspaceLoc); + + let workspaceManifestJson = projectManifestJson; + if (!cwdIsRoot) { + // the manifest we read before was a child workspace, so get the root + workspaceManifestJson = yield _this.config.readJson(workspaceLoc); + yield (0, (_index || _load_index()).default)(workspaceManifestJson, workspacesRoot, _this.config, true); + } + + const workspaces = yield _this.config.resolveWorkspaces(workspacesRoot, workspaceManifestJson); + workspaceLayout = new (_workspaceLayout || _load_workspaceLayout()).default(workspaces, _this.config); + + // add virtual manifest that depends on all workspaces, this way package hoisters and resolvers will work fine + const workspaceDependencies = (0, (_extends2 || _load_extends()).default)({}, workspaceManifestJson.dependencies); + for (var _iterator5 = Object.keys(workspaces), _isArray5 = Array.isArray(_iterator5), _i5 = 0, _iterator5 = _isArray5 ? 
_iterator5 : _iterator5[Symbol.iterator]();;) { + var _ref5; + + if (_isArray5) { + if (_i5 >= _iterator5.length) break; + _ref5 = _iterator5[_i5++]; + } else { + _i5 = _iterator5.next(); + if (_i5.done) break; + _ref5 = _i5.value; + } + + const workspaceName = _ref5; + + const workspaceManifest = workspaces[workspaceName].manifest; + workspaceDependencies[workspaceName] = workspaceManifest.version; + + // include dependencies from all workspaces + if (_this.flags.includeWorkspaceDeps) { + pushDeps('dependencies', workspaceManifest, { hint: null, optional: false }, true); + pushDeps('devDependencies', workspaceManifest, { hint: 'dev', optional: false }, !_this.config.production); + pushDeps('optionalDependencies', workspaceManifest, { hint: 'optional', optional: true }, true); + } + } + const virtualDependencyManifest = { + _uid: '', + name: `workspace-aggregator-${uuid.v4()}`, + version: '1.0.0', + _registry: 'npm', + _loc: workspacesRoot, + dependencies: workspaceDependencies, + devDependencies: (0, (_extends2 || _load_extends()).default)({}, workspaceManifestJson.devDependencies), + optionalDependencies: (0, (_extends2 || _load_extends()).default)({}, workspaceManifestJson.optionalDependencies), + private: workspaceManifestJson.private, + workspaces: workspaceManifestJson.workspaces + }; + workspaceLayout.virtualManifestName = virtualDependencyManifest.name; + const virtualDep = {}; + virtualDep[virtualDependencyManifest.name] = virtualDependencyManifest.version; + workspaces[virtualDependencyManifest.name] = { loc: workspacesRoot, manifest: virtualDependencyManifest }; + + // ensure dependencies that should be excluded are stripped from the correct manifest + stripExcluded(cwdIsRoot ? virtualDependencyManifest : workspaces[projectManifestJson.name].manifest); + + pushDeps('workspaces', { workspaces: virtualDep }, { hint: 'workspaces', optional: false }, true); + + const implicitWorkspaceDependencies = (0, (_extends2 || _load_extends()).default)({}, workspaceDependencies); + + for (var _iterator6 = (_constants || _load_constants()).OWNED_DEPENDENCY_TYPES, _isArray6 = Array.isArray(_iterator6), _i6 = 0, _iterator6 = _isArray6 ? _iterator6 : _iterator6[Symbol.iterator]();;) { + var _ref6; + + if (_isArray6) { + if (_i6 >= _iterator6.length) break; + _ref6 = _iterator6[_i6++]; + } else { + _i6 = _iterator6.next(); + if (_i6.done) break; + _ref6 = _i6.value; + } + + const type = _ref6; + + for (var _iterator7 = Object.keys(projectManifestJson[type] || {}), _isArray7 = Array.isArray(_iterator7), _i7 = 0, _iterator7 = _isArray7 ? 
_iterator7 : _iterator7[Symbol.iterator]();;) { + var _ref7; + + if (_isArray7) { + if (_i7 >= _iterator7.length) break; + _ref7 = _iterator7[_i7++]; + } else { + _i7 = _iterator7.next(); + if (_i7.done) break; + _ref7 = _i7.value; + } + + const dependencyName = _ref7; + + delete implicitWorkspaceDependencies[dependencyName]; + } + } + + pushDeps('dependencies', { dependencies: implicitWorkspaceDependencies }, { hint: 'workspaces', optional: false }, true); + } + + break; + } + + // inherit root flat flag + if (manifest.flat) { + _this.flags.flat = true; + } + + return { + requests: [...resolutionDeps, ...deps], + patterns, + manifest, + usedPatterns, + ignorePatterns, + workspaceLayout + }; + })(); + } + + /** + * TODO description + */ + + prepareRequests(requests) { + return requests; + } + + preparePatterns(patterns) { + return patterns; + } + preparePatternsForLinking(patterns, cwdManifest, cwdIsRoot) { + return patterns; + } + + prepareManifests() { + var _this2 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const manifests = yield _this2.config.getRootManifests(); + return manifests; + })(); + } + + bailout(patterns, workspaceLayout) { + var _this3 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + // We don't want to skip the audit - it could yield important errors + if (_this3.flags.audit) { + return false; + } + // PNP is so fast that the integrity check isn't pertinent + if (_this3.config.plugnplayEnabled) { + return false; + } + if (_this3.flags.skipIntegrityCheck || _this3.flags.force) { + return false; + } + const lockfileCache = _this3.lockfile.cache; + if (!lockfileCache) { + return false; + } + const lockfileClean = _this3.lockfile.parseResultType === 'success'; + const match = yield _this3.integrityChecker.check(patterns, lockfileCache, _this3.flags, workspaceLayout); + if (_this3.flags.frozenLockfile && (!lockfileClean || match.missingPatterns.length > 0)) { + throw new (_errors || _load_errors()).MessageError(_this3.reporter.lang('frozenLockfileError')); + } + + const haveLockfile = yield (_fs || _load_fs()).exists(path.join(_this3.config.lockfileFolder, (_constants || _load_constants()).LOCKFILE_FILENAME)); + + const lockfileIntegrityPresent = !_this3.lockfile.hasEntriesExistWithoutIntegrity(); + const integrityBailout = lockfileIntegrityPresent || !_this3.config.autoAddIntegrity; + + if (match.integrityMatches && haveLockfile && lockfileClean && integrityBailout) { + _this3.reporter.success(_this3.reporter.lang('upToDate')); + return true; + } + + if (match.integrityFileMissing && haveLockfile) { + // Integrity file missing, force script installations + _this3.scripts.setForce(true); + return false; + } + + if (match.hardRefreshRequired) { + // e.g. node version doesn't match, force script installations + _this3.scripts.setForce(true); + return false; + } + + if (!patterns.length && !match.integrityFileMissing) { + _this3.reporter.success(_this3.reporter.lang('nothingToInstall')); + yield _this3.createEmptyManifestFolders(); + yield _this3.saveLockfileAndIntegrity(patterns, workspaceLayout); + return true; + } + + return false; + })(); + } + + /** + * Produce empty folders for all used root manifests. 
+ */ + + createEmptyManifestFolders() { + var _this4 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + if (_this4.config.modulesFolder) { + // already created + return; + } + + for (var _iterator9 = _this4.rootManifestRegistries, _isArray9 = Array.isArray(_iterator9), _i9 = 0, _iterator9 = _isArray9 ? _iterator9 : _iterator9[Symbol.iterator]();;) { + var _ref10; + + if (_isArray9) { + if (_i9 >= _iterator9.length) break; + _ref10 = _iterator9[_i9++]; + } else { + _i9 = _iterator9.next(); + if (_i9.done) break; + _ref10 = _i9.value; + } + + const registryName = _ref10; + const folder = _this4.config.registries[registryName].folder; + + yield (_fs || _load_fs()).mkdirp(path.join(_this4.config.lockfileFolder, folder)); + } + })(); + } + + /** + * TODO description + */ + + markIgnored(patterns) { + for (var _iterator10 = patterns, _isArray10 = Array.isArray(_iterator10), _i10 = 0, _iterator10 = _isArray10 ? _iterator10 : _iterator10[Symbol.iterator]();;) { + var _ref11; + + if (_isArray10) { + if (_i10 >= _iterator10.length) break; + _ref11 = _iterator10[_i10++]; + } else { + _i10 = _iterator10.next(); + if (_i10.done) break; + _ref11 = _i10.value; + } + + const pattern = _ref11; + + const manifest = this.resolver.getStrictResolvedPattern(pattern); + const ref = manifest._reference; + invariant(ref, 'expected package reference'); + + // just mark the package as ignored. if the package is used by a required package, the hoister + // will take care of that. + ref.ignore = true; + } + } + + /** + * helper method that gets only recent manifests + * used by global.ls command + */ + getFlattenedDeps() { + var _this5 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + var _ref12 = yield _this5.fetchRequestFromCwd(); + + const depRequests = _ref12.requests, + rawPatterns = _ref12.patterns; + + + yield _this5.resolver.init(depRequests, {}); + + const manifests = yield (_packageFetcher || _load_packageFetcher()).fetch(_this5.resolver.getManifests(), _this5.config); + _this5.resolver.updateManifests(manifests); + + return _this5.flatten(rawPatterns); + })(); + } + + /** + * TODO description + */ + + init() { + var _this6 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + _this6.checkUpdate(); + + // warn if we have a shrinkwrap + if (yield (_fs || _load_fs()).exists(path.join(_this6.config.lockfileFolder, (_constants || _load_constants()).NPM_SHRINKWRAP_FILENAME))) { + _this6.reporter.warn(_this6.reporter.lang('shrinkwrapWarning')); + } + + // warn if we have an npm lockfile + if (yield (_fs || _load_fs()).exists(path.join(_this6.config.lockfileFolder, (_constants || _load_constants()).NPM_LOCK_FILENAME))) { + _this6.reporter.warn(_this6.reporter.lang('npmLockfileWarning')); + } + + if (_this6.config.plugnplayEnabled) { + _this6.reporter.info(_this6.reporter.lang('plugnplaySuggestV2L1')); + _this6.reporter.info(_this6.reporter.lang('plugnplaySuggestV2L2')); + } + + let flattenedTopLevelPatterns = []; + const steps = []; + + var _ref13 = yield _this6.fetchRequestFromCwd(); + + const depRequests = _ref13.requests, + rawPatterns = _ref13.patterns, + ignorePatterns = _ref13.ignorePatterns, + workspaceLayout = _ref13.workspaceLayout, + manifest = _ref13.manifest; + + let topLevelPatterns = []; + + const artifacts = yield _this6.integrityChecker.getArtifacts(); + if (artifacts) { + _this6.linker.setArtifacts(artifacts); + _this6.scripts.setArtifacts(artifacts); + } + + if 
((_packageCompatibility || _load_packageCompatibility()).shouldCheck(manifest, _this6.flags)) { + steps.push((() => { + var _ref14 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (curr, total) { + _this6.reporter.step(curr, total, _this6.reporter.lang('checkingManifest'), emoji.get('mag')); + yield _this6.checkCompatibility(); + }); + + return function (_x, _x2) { + return _ref14.apply(this, arguments); + }; + })()); + } + + const audit = new (_audit || _load_audit()).default(_this6.config, _this6.reporter, { groups: (_constants || _load_constants()).OWNED_DEPENDENCY_TYPES }); + let auditFoundProblems = false; + + steps.push(function (curr, total) { + return (0, (_hooks || _load_hooks()).callThroughHook)('resolveStep', (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + _this6.reporter.step(curr, total, _this6.reporter.lang('resolvingPackages'), emoji.get('mag')); + yield _this6.resolver.init(_this6.prepareRequests(depRequests), { + isFlat: _this6.flags.flat, + isFrozen: _this6.flags.frozenLockfile, + workspaceLayout + }); + topLevelPatterns = _this6.preparePatterns(rawPatterns); + flattenedTopLevelPatterns = yield _this6.flatten(topLevelPatterns); + return { bailout: !_this6.flags.audit && (yield _this6.bailout(topLevelPatterns, workspaceLayout)) }; + })); + }); + + if (_this6.flags.audit) { + steps.push(function (curr, total) { + return (0, (_hooks || _load_hooks()).callThroughHook)('auditStep', (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + _this6.reporter.step(curr, total, _this6.reporter.lang('auditRunning'), emoji.get('mag')); + if (_this6.flags.offline) { + _this6.reporter.warn(_this6.reporter.lang('auditOffline')); + return { bailout: false }; + } + const preparedManifests = yield _this6.prepareManifests(); + // $FlowFixMe - Flow considers `m` in the map operation to be "mixed", so does not recognize `m.object` + const mergedManifest = Object.assign({}, ...Object.values(preparedManifests).map(function (m) { + return m.object; + })); + const auditVulnerabilityCounts = yield audit.performAudit(mergedManifest, _this6.lockfile, _this6.resolver, _this6.linker, topLevelPatterns); + auditFoundProblems = auditVulnerabilityCounts.info || auditVulnerabilityCounts.low || auditVulnerabilityCounts.moderate || auditVulnerabilityCounts.high || auditVulnerabilityCounts.critical; + return { bailout: yield _this6.bailout(topLevelPatterns, workspaceLayout) }; + })); + }); + } + + steps.push(function (curr, total) { + return (0, (_hooks || _load_hooks()).callThroughHook)('fetchStep', (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + _this6.markIgnored(ignorePatterns); + _this6.reporter.step(curr, total, _this6.reporter.lang('fetchingPackages'), emoji.get('truck')); + const manifests = yield (_packageFetcher || _load_packageFetcher()).fetch(_this6.resolver.getManifests(), _this6.config); + _this6.resolver.updateManifests(manifests); + yield (_packageCompatibility || _load_packageCompatibility()).check(_this6.resolver.getManifests(), _this6.config, _this6.flags.ignoreEngines); + })); + }); + + steps.push(function (curr, total) { + return (0, (_hooks || _load_hooks()).callThroughHook)('linkStep', (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + // remove integrity hash to make this operation atomic + yield _this6.integrityChecker.removeIntegrityFile(); + _this6.reporter.step(curr, total, _this6.reporter.lang('linkingDependencies'), emoji.get('link')); + 
flattenedTopLevelPatterns = _this6.preparePatternsForLinking(flattenedTopLevelPatterns, manifest, _this6.config.lockfileFolder === _this6.config.cwd); + yield _this6.linker.init(flattenedTopLevelPatterns, workspaceLayout, { + linkDuplicates: _this6.flags.linkDuplicates, + ignoreOptional: _this6.flags.ignoreOptional + }); + })); + }); + + if (_this6.config.plugnplayEnabled) { + steps.push(function (curr, total) { + return (0, (_hooks || _load_hooks()).callThroughHook)('pnpStep', (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const pnpPath = `${_this6.config.lockfileFolder}/${(_constants || _load_constants()).PNP_FILENAME}`; + + const code = yield (0, (_generatePnpMap || _load_generatePnpMap()).generatePnpMap)(_this6.config, flattenedTopLevelPatterns, { + resolver: _this6.resolver, + reporter: _this6.reporter, + targetPath: pnpPath, + workspaceLayout + }); + + try { + const file = yield (_fs || _load_fs()).readFile(pnpPath); + if (file === code) { + return; + } + } catch (error) {} + + yield (_fs || _load_fs()).writeFile(pnpPath, code); + yield (_fs || _load_fs()).chmod(pnpPath, 0o755); + })); + }); + } + + steps.push(function (curr, total) { + return (0, (_hooks || _load_hooks()).callThroughHook)('buildStep', (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + _this6.reporter.step(curr, total, _this6.flags.force ? _this6.reporter.lang('rebuildingPackages') : _this6.reporter.lang('buildingFreshPackages'), emoji.get('hammer')); + + if (_this6.config.ignoreScripts) { + _this6.reporter.warn(_this6.reporter.lang('ignoredScripts')); + } else { + yield _this6.scripts.init(flattenedTopLevelPatterns); + } + })); + }); + + if (_this6.flags.har) { + steps.push((() => { + var _ref21 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (curr, total) { + const formattedDate = new Date().toISOString().replace(/:/g, '-'); + const filename = `yarn-install_${formattedDate}.har`; + _this6.reporter.step(curr, total, _this6.reporter.lang('savingHar', filename), emoji.get('black_circle_for_record')); + yield _this6.config.requestManager.saveHar(filename); + }); + + return function (_x3, _x4) { + return _ref21.apply(this, arguments); + }; + })()); + } + + if (yield _this6.shouldClean()) { + steps.push((() => { + var _ref22 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (curr, total) { + _this6.reporter.step(curr, total, _this6.reporter.lang('cleaningModules'), emoji.get('recycle')); + yield (0, (_autoclean || _load_autoclean()).clean)(_this6.config, _this6.reporter); + }); + + return function (_x5, _x6) { + return _ref22.apply(this, arguments); + }; + })()); + } + + let currentStep = 0; + for (var _iterator11 = steps, _isArray11 = Array.isArray(_iterator11), _i11 = 0, _iterator11 = _isArray11 ? _iterator11 : _iterator11[Symbol.iterator]();;) { + var _ref23; + + if (_isArray11) { + if (_i11 >= _iterator11.length) break; + _ref23 = _iterator11[_i11++]; + } else { + _i11 = _iterator11.next(); + if (_i11.done) break; + _ref23 = _i11.value; + } + + const step = _ref23; + + const stepResult = yield step(++currentStep, steps.length); + if (stepResult && stepResult.bailout) { + if (_this6.flags.audit) { + audit.summary(); + } + if (auditFoundProblems) { + _this6.reporter.warn(_this6.reporter.lang('auditRunAuditForDetails')); + } + _this6.maybeOutputUpdate(); + return flattenedTopLevelPatterns; + } + } + + // fin! 
+ if (_this6.flags.audit) { + audit.summary(); + } + if (auditFoundProblems) { + _this6.reporter.warn(_this6.reporter.lang('auditRunAuditForDetails')); + } + yield _this6.saveLockfileAndIntegrity(topLevelPatterns, workspaceLayout); + yield _this6.persistChanges(); + _this6.maybeOutputUpdate(); + _this6.config.requestManager.clearCache(); + return flattenedTopLevelPatterns; + })(); + } + + checkCompatibility() { + var _this7 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + var _ref24 = yield _this7.fetchRequestFromCwd(); + + const manifest = _ref24.manifest; + + yield (_packageCompatibility || _load_packageCompatibility()).checkOne(manifest, _this7.config, _this7.flags.ignoreEngines); + })(); + } + + persistChanges() { + var _this8 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + // get all the different registry manifests in this folder + const manifests = yield _this8.config.getRootManifests(); + + if (yield _this8.applyChanges(manifests)) { + yield _this8.config.saveRootManifests(manifests); + } + })(); + } + + applyChanges(manifests) { + let hasChanged = false; + + if (this.config.plugnplayPersist) { + const object = manifests.npm.object; + + + if (typeof object.installConfig !== 'object') { + object.installConfig = {}; + } + + if (this.config.plugnplayEnabled && object.installConfig.pnp !== true) { + object.installConfig.pnp = true; + hasChanged = true; + } else if (!this.config.plugnplayEnabled && typeof object.installConfig.pnp !== 'undefined') { + delete object.installConfig.pnp; + hasChanged = true; + } + + if (Object.keys(object.installConfig).length === 0) { + delete object.installConfig; + } + } + + return Promise.resolve(hasChanged); + } + + /** + * Check if we should run the cleaning step. + */ + + shouldClean() { + return (_fs || _load_fs()).exists(path.join(this.config.lockfileFolder, (_constants || _load_constants()).CLEAN_FILENAME)); + } + + /** + * TODO + */ + + flatten(patterns) { + var _this9 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + if (!_this9.flags.flat) { + return patterns; + } + + const flattenedPatterns = []; + + for (var _iterator12 = _this9.resolver.getAllDependencyNamesByLevelOrder(patterns), _isArray12 = Array.isArray(_iterator12), _i12 = 0, _iterator12 = _isArray12 ? 
_iterator12 : _iterator12[Symbol.iterator]();;) { + var _ref25; + + if (_isArray12) { + if (_i12 >= _iterator12.length) break; + _ref25 = _iterator12[_i12++]; + } else { + _i12 = _iterator12.next(); + if (_i12.done) break; + _ref25 = _i12.value; + } + + const name = _ref25; + + const infos = _this9.resolver.getAllInfoForPackageName(name).filter(function (manifest) { + const ref = manifest._reference; + invariant(ref, 'expected package reference'); + return !ref.ignore; + }); + + if (infos.length === 0) { + continue; + } + + if (infos.length === 1) { + // single version of this package + // take out a single pattern as multiple patterns may have resolved to this package + flattenedPatterns.push(_this9.resolver.patternsByPackage[name][0]); + continue; + } + + const options = infos.map(function (info) { + const ref = info._reference; + invariant(ref, 'expected reference'); + return { + // TODO `and is required by {PARENT}`, + name: _this9.reporter.lang('manualVersionResolutionOption', ref.patterns.join(', '), info.version), + + value: info.version + }; + }); + const versions = infos.map(function (info) { + return info.version; + }); + let version; + + const resolutionVersion = _this9.resolutions[name]; + if (resolutionVersion && versions.indexOf(resolutionVersion) >= 0) { + // use json `resolution` version + version = resolutionVersion; + } else { + version = yield _this9.reporter.select(_this9.reporter.lang('manualVersionResolution', name), _this9.reporter.lang('answer'), options); + _this9.resolutions[name] = version; + } + + flattenedPatterns.push(_this9.resolver.collapseAllVersionsOfPackage(name, version)); + } + + // save resolutions to their appropriate root manifest + if (Object.keys(_this9.resolutions).length) { + const manifests = yield _this9.config.getRootManifests(); + + for (const name in _this9.resolutions) { + const version = _this9.resolutions[name]; + + const patterns = _this9.resolver.patternsByPackage[name]; + if (!patterns) { + continue; + } + + let manifest; + for (var _iterator13 = patterns, _isArray13 = Array.isArray(_iterator13), _i13 = 0, _iterator13 = _isArray13 ? 
_iterator13 : _iterator13[Symbol.iterator]();;) { + var _ref26; + + if (_isArray13) { + if (_i13 >= _iterator13.length) break; + _ref26 = _iterator13[_i13++]; + } else { + _i13 = _iterator13.next(); + if (_i13.done) break; + _ref26 = _i13.value; + } + + const pattern = _ref26; + + manifest = _this9.resolver.getResolvedPattern(pattern); + if (manifest) { + break; + } + } + invariant(manifest, 'expected manifest'); + + const ref = manifest._reference; + invariant(ref, 'expected reference'); + + const object = manifests[ref.registry].object; + object.resolutions = object.resolutions || {}; + object.resolutions[name] = version; + } + + yield _this9.config.saveRootManifests(manifests); + } + + return flattenedPatterns; + })(); + } + + /** + * Remove offline tarballs that are no longer required + */ + + pruneOfflineMirror(lockfile) { + var _this10 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const mirror = _this10.config.getOfflineMirrorPath(); + if (!mirror) { + return; + } + + const requiredTarballs = new Set(); + for (const dependency in lockfile) { + const resolved = lockfile[dependency].resolved; + if (resolved) { + const basename = path.basename(resolved.split('#')[0]); + if (dependency[0] === '@' && basename[0] !== '@') { + requiredTarballs.add(`${dependency.split('/')[0]}-${basename}`); + } + requiredTarballs.add(basename); + } + } + + const mirrorFiles = yield (_fs || _load_fs()).walk(mirror); + for (var _iterator14 = mirrorFiles, _isArray14 = Array.isArray(_iterator14), _i14 = 0, _iterator14 = _isArray14 ? _iterator14 : _iterator14[Symbol.iterator]();;) { + var _ref27; + + if (_isArray14) { + if (_i14 >= _iterator14.length) break; + _ref27 = _iterator14[_i14++]; + } else { + _i14 = _iterator14.next(); + if (_i14.done) break; + _ref27 = _i14.value; + } + + const file = _ref27; + + const isTarball = path.extname(file.basename) === '.tgz'; + // if using experimental-pack-script-packages-in-mirror flag, don't unlink prebuilt packages + const hasPrebuiltPackage = file.relative.startsWith('prebuilt/'); + if (isTarball && !hasPrebuiltPackage && !requiredTarballs.has(file.basename)) { + yield (_fs || _load_fs()).unlink(file.absolute); + } + } + })(); + } + + /** + * Save updated integrity and lockfiles. 
+ */ + + saveLockfileAndIntegrity(patterns, workspaceLayout) { + var _this11 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const resolvedPatterns = {}; + Object.keys(_this11.resolver.patterns).forEach(function (pattern) { + if (!workspaceLayout || !workspaceLayout.getManifestByPattern(pattern)) { + resolvedPatterns[pattern] = _this11.resolver.patterns[pattern]; + } + }); + + // TODO this code is duplicated in a few places, need a common way to filter out workspace patterns from lockfile + patterns = patterns.filter(function (p) { + return !workspaceLayout || !workspaceLayout.getManifestByPattern(p); + }); + + const lockfileBasedOnResolver = _this11.lockfile.getLockfile(resolvedPatterns); + + if (_this11.config.pruneOfflineMirror) { + yield _this11.pruneOfflineMirror(lockfileBasedOnResolver); + } + + // write integrity hash + if (!_this11.config.plugnplayEnabled) { + yield _this11.integrityChecker.save(patterns, lockfileBasedOnResolver, _this11.flags, workspaceLayout, _this11.scripts.getArtifacts()); + } + + // --no-lockfile or --pure-lockfile or --frozen-lockfile + if (_this11.flags.lockfile === false || _this11.flags.pureLockfile || _this11.flags.frozenLockfile) { + return; + } + + const lockFileHasAllPatterns = patterns.every(function (p) { + return _this11.lockfile.getLocked(p); + }); + const lockfilePatternsMatch = Object.keys(_this11.lockfile.cache || {}).every(function (p) { + return lockfileBasedOnResolver[p]; + }); + const resolverPatternsAreSameAsInLockfile = Object.keys(lockfileBasedOnResolver).every(function (pattern) { + const manifest = _this11.lockfile.getLocked(pattern); + return manifest && manifest.resolved === lockfileBasedOnResolver[pattern].resolved && deepEqual(manifest.prebuiltVariants, lockfileBasedOnResolver[pattern].prebuiltVariants); + }); + const integrityPatternsAreSameAsInLockfile = Object.keys(lockfileBasedOnResolver).every(function (pattern) { + const existingIntegrityInfo = lockfileBasedOnResolver[pattern].integrity; + if (!existingIntegrityInfo) { + // if this entry does not have an integrity, no need to re-write the lockfile because of it + return true; + } + const manifest = _this11.lockfile.getLocked(pattern); + if (manifest && manifest.integrity) { + const manifestIntegrity = ssri.stringify(manifest.integrity); + return manifestIntegrity === existingIntegrityInfo; + } + return false; + }); + + // remove command is followed by install with force, lockfile will be rewritten in any case then + if (!_this11.flags.force && _this11.lockfile.parseResultType === 'success' && lockFileHasAllPatterns && lockfilePatternsMatch && resolverPatternsAreSameAsInLockfile && integrityPatternsAreSameAsInLockfile && patterns.length) { + return; + } + + // build lockfile location + const loc = path.join(_this11.config.lockfileFolder, (_constants || _load_constants()).LOCKFILE_FILENAME); + + // write lockfile + const lockSource = (0, (_lockfile2 || _load_lockfile2()).stringify)(lockfileBasedOnResolver, false, _this11.config.enableLockfileVersions); + yield (_fs || _load_fs()).writeFilePreservingEol(loc, lockSource); + + _this11._logSuccessSaveLockfile(); + })(); + } + + _logSuccessSaveLockfile() { + this.reporter.success(this.reporter.lang('savedLockfile')); + } + + /** + * Load the dependency graph of the current install. Only does package resolving and wont write to the cwd. 
+ */ + hydrate(ignoreUnusedPatterns) { + var _this12 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + const request = yield _this12.fetchRequestFromCwd([], ignoreUnusedPatterns); + const depRequests = request.requests, + rawPatterns = request.patterns, + ignorePatterns = request.ignorePatterns, + workspaceLayout = request.workspaceLayout; + + + yield _this12.resolver.init(depRequests, { + isFlat: _this12.flags.flat, + isFrozen: _this12.flags.frozenLockfile, + workspaceLayout + }); + yield _this12.flatten(rawPatterns); + _this12.markIgnored(ignorePatterns); + + // fetch packages, should hit cache most of the time + const manifests = yield (_packageFetcher || _load_packageFetcher()).fetch(_this12.resolver.getManifests(), _this12.config); + _this12.resolver.updateManifests(manifests); + yield (_packageCompatibility || _load_packageCompatibility()).check(_this12.resolver.getManifests(), _this12.config, _this12.flags.ignoreEngines); + + // expand minimal manifests + for (var _iterator15 = _this12.resolver.getManifests(), _isArray15 = Array.isArray(_iterator15), _i15 = 0, _iterator15 = _isArray15 ? _iterator15 : _iterator15[Symbol.iterator]();;) { + var _ref28; + + if (_isArray15) { + if (_i15 >= _iterator15.length) break; + _ref28 = _iterator15[_i15++]; + } else { + _i15 = _iterator15.next(); + if (_i15.done) break; + _ref28 = _i15.value; + } + + const manifest = _ref28; + + const ref = manifest._reference; + invariant(ref, 'expected reference'); + const type = ref.remote.type; + // link specifier won't ever hit cache + + let loc = ''; + if (type === 'link') { + continue; + } else if (type === 'workspace') { + if (!ref.remote.reference) { + continue; + } + loc = ref.remote.reference; + } else { + loc = _this12.config.generateModuleCachePath(ref); + } + const newPkg = yield _this12.config.readManifest(loc); + yield _this12.resolver.updateManifest(ref, newPkg); + } + + return request; + })(); + } + + /** + * Check for updates every day and output a nag message if there's a newer version. 
+ */ + + checkUpdate() { + if (this.config.nonInteractive) { + // don't show upgrade dialog on CI or non-TTY terminals + return; + } + + // don't check if disabled + if (this.config.getOption('disable-self-update-check')) { + return; + } + + // only check for updates once a day + const lastUpdateCheck = Number(this.config.getOption('lastUpdateCheck')) || 0; + if (lastUpdateCheck && Date.now() - lastUpdateCheck < ONE_DAY) { + return; + } + + // don't bug for updates on tagged releases + if ((_yarnVersion || _load_yarnVersion()).version.indexOf('-') >= 0) { + return; + } + + this._checkUpdate().catch(() => { + // swallow errors + }); + } + + _checkUpdate() { + var _this13 = this; + + return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () { + let latestVersion = yield _this13.config.requestManager.request({ + url: (_constants || _load_constants()).SELF_UPDATE_VERSION_URL + }); + invariant(typeof latestVersion === 'string', 'expected string'); + latestVersion = latestVersion.trim(); + if (!semver.valid(latestVersion)) { + return; + } + + // ensure we only check for updates periodically + _this13.config.registries.yarn.saveHomeConfig({ + lastUpdateCheck: Date.now() + }); + + if (semver.gt(latestVersion, (_yarnVersion || _load_yarnVersion()).version)) { + const installationMethod = yield (0, (_yarnVersion || _load_yarnVersion()).getInstallationMethod)(); + _this13.maybeOutputUpdate = function () { + _this13.reporter.warn(_this13.reporter.lang('yarnOutdated', latestVersion, (_yarnVersion || _load_yarnVersion()).version)); + + const command = getUpdateCommand(installationMethod); + if (command) { + _this13.reporter.info(_this13.reporter.lang('yarnOutdatedCommand')); + _this13.reporter.command(command); + } else { + const installer = getUpdateInstaller(installationMethod); + if (installer) { + _this13.reporter.info(_this13.reporter.lang('yarnOutdatedInstaller', installer)); + } + } + }; + } + })(); + } + + /** + * Method to override with a possible upgrade message. 
+ */ + + maybeOutputUpdate() {} +} + +exports.Install = Install; +function hasWrapper(commander, args) { + return true; +} + +function setFlags(commander) { + commander.description('Yarn install is used to install all dependencies for a project.'); + commander.usage('install [flags]'); + commander.option('-A, --audit', 'Run vulnerability audit on installed packages'); + commander.option('-g, --global', 'DEPRECATED'); + commander.option('-S, --save', 'DEPRECATED - save package to your `dependencies`'); + commander.option('-D, --save-dev', 'DEPRECATED - save package to your `devDependencies`'); + commander.option('-P, --save-peer', 'DEPRECATED - save package to your `peerDependencies`'); + commander.option('-O, --save-optional', 'DEPRECATED - save package to your `optionalDependencies`'); + commander.option('-E, --save-exact', 'DEPRECATED'); + commander.option('-T, --save-tilde', 'DEPRECATED'); +} + +/***/ }), +/* 35 */ +/***/ (function(module, exports, __webpack_require__) { + +var isObject = __webpack_require__(52); +module.exports = function (it) { + if (!isObject(it)) throw TypeError(it + ' is not an object!'); + return it; +}; + + +/***/ }), +/* 36 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return SubjectSubscriber; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return Subject; }); +/* unused harmony export AnonymousSubject */ +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0_tslib__ = __webpack_require__(1); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__Observable__ = __webpack_require__(12); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__Subscriber__ = __webpack_require__(7); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__Subscription__ = __webpack_require__(25); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__util_ObjectUnsubscribedError__ = __webpack_require__(189); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5__SubjectSubscription__ = __webpack_require__(422); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_6__internal_symbol_rxSubscriber__ = __webpack_require__(321); +/** PURE_IMPORTS_START tslib,_Observable,_Subscriber,_Subscription,_util_ObjectUnsubscribedError,_SubjectSubscription,_internal_symbol_rxSubscriber PURE_IMPORTS_END */ + + + + + + + +var SubjectSubscriber = /*@__PURE__*/ (function (_super) { + __WEBPACK_IMPORTED_MODULE_0_tslib__["a" /* __extends */](SubjectSubscriber, _super); + function SubjectSubscriber(destination) { + var _this = _super.call(this, destination) || this; + _this.destination = destination; + return _this; + } + return SubjectSubscriber; +}(__WEBPACK_IMPORTED_MODULE_2__Subscriber__["a" /* Subscriber */])); + +var Subject = /*@__PURE__*/ (function (_super) { + __WEBPACK_IMPORTED_MODULE_0_tslib__["a" /* __extends */](Subject, _super); + function Subject() { + var _this = _super.call(this) || this; + _this.observers = []; + _this.closed = false; + _this.isStopped = false; + _this.hasError = false; + _this.thrownError = null; + return _this; + } + Subject.prototype[__WEBPACK_IMPORTED_MODULE_6__internal_symbol_rxSubscriber__["a" /* rxSubscriber */]] = function () { + return new SubjectSubscriber(this); + }; + Subject.prototype.lift = function (operator) { + var subject = new AnonymousSubject(this, this); + subject.operator = operator; + return subject; + }; + Subject.prototype.next = function (value) { + if (this.closed) { 
+ throw new __WEBPACK_IMPORTED_MODULE_4__util_ObjectUnsubscribedError__["a" /* ObjectUnsubscribedError */](); + } + if (!this.isStopped) { + var observers = this.observers; + var len = observers.length; + var copy = observers.slice(); + for (var i = 0; i < len; i++) { + copy[i].next(value); + } + } + }; + Subject.prototype.error = function (err) { + if (this.closed) { + throw new __WEBPACK_IMPORTED_MODULE_4__util_ObjectUnsubscribedError__["a" /* ObjectUnsubscribedError */](); + } + this.hasError = true; + this.thrownError = err; + this.isStopped = true; + var observers = this.observers; + var len = observers.length; + var copy = observers.slice(); + for (var i = 0; i < len; i++) { + copy[i].error(err); + } + this.observers.length = 0; + }; + Subject.prototype.complete = function () { + if (this.closed) { + throw new __WEBPACK_IMPORTED_MODULE_4__util_ObjectUnsubscribedError__["a" /* ObjectUnsubscribedError */](); + } + this.isStopped = true; + var observers = this.observers; + var len = observers.length; + var copy = observers.slice(); + for (var i = 0; i < len; i++) { + copy[i].complete(); + } + this.observers.length = 0; + }; + Subject.prototype.unsubscribe = function () { + this.isStopped = true; + this.closed = true; + this.observers = null; + }; + Subject.prototype._trySubscribe = function (subscriber) { + if (this.closed) { + throw new __WEBPACK_IMPORTED_MODULE_4__util_ObjectUnsubscribedError__["a" /* ObjectUnsubscribedError */](); + } + else { + return _super.prototype._trySubscribe.call(this, subscriber); + } + }; + Subject.prototype._subscribe = function (subscriber) { + if (this.closed) { + throw new __WEBPACK_IMPORTED_MODULE_4__util_ObjectUnsubscribedError__["a" /* ObjectUnsubscribedError */](); + } + else if (this.hasError) { + subscriber.error(this.thrownError); + return __WEBPACK_IMPORTED_MODULE_3__Subscription__["a" /* Subscription */].EMPTY; + } + else if (this.isStopped) { + subscriber.complete(); + return __WEBPACK_IMPORTED_MODULE_3__Subscription__["a" /* Subscription */].EMPTY; + } + else { + this.observers.push(subscriber); + return new __WEBPACK_IMPORTED_MODULE_5__SubjectSubscription__["a" /* SubjectSubscription */](this, subscriber); + } + }; + Subject.prototype.asObservable = function () { + var observable = new __WEBPACK_IMPORTED_MODULE_1__Observable__["a" /* Observable */](); + observable.source = this; + return observable; + }; + Subject.create = function (destination, source) { + return new AnonymousSubject(destination, source); + }; + return Subject; +}(__WEBPACK_IMPORTED_MODULE_1__Observable__["a" /* Observable */])); + +var AnonymousSubject = /*@__PURE__*/ (function (_super) { + __WEBPACK_IMPORTED_MODULE_0_tslib__["a" /* __extends */](AnonymousSubject, _super); + function AnonymousSubject(destination, source) { + var _this = _super.call(this) || this; + _this.destination = destination; + _this.source = source; + return _this; + } + AnonymousSubject.prototype.next = function (value) { + var destination = this.destination; + if (destination && destination.next) { + destination.next(value); + } + }; + AnonymousSubject.prototype.error = function (err) { + var destination = this.destination; + if (destination && destination.error) { + this.destination.error(err); + } + }; + AnonymousSubject.prototype.complete = function () { + var destination = this.destination; + if (destination && destination.complete) { + this.destination.complete(); + } + }; + AnonymousSubject.prototype._subscribe = function (subscriber) { + var source = this.source; + if (source) { + return 
this.source.subscribe(subscriber); + } + else { + return __WEBPACK_IMPORTED_MODULE_3__Subscription__["a" /* Subscription */].EMPTY; + } + }; + return AnonymousSubject; +}(Subject)); + +//# sourceMappingURL=Subject.js.map + + +/***/ }), +/* 37 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.normalizePattern = normalizePattern; + +/** + * Explode and normalize a pattern into its name and range. + */ + +function normalizePattern(pattern) { + let hasVersion = false; + let range = 'latest'; + let name = pattern; + + // if we're a scope then remove the @ and add it back later + let isScoped = false; + if (name[0] === '@') { + isScoped = true; + name = name.slice(1); + } + + // take first part as the name + const parts = name.split('@'); + if (parts.length > 1) { + name = parts.shift(); + range = parts.join('@'); + + if (range) { + hasVersion = true; + } else { + range = '*'; + } + } + + // add back @ scope suffix + if (isScoped) { + name = `@${name}`; + } + + return { name, range, hasVersion }; +} + +/***/ }), +/* 38 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(module) {var __WEBPACK_AMD_DEFINE_RESULT__;/** + * @license + * Lodash + * Copyright JS Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ +;(function() { + + /** Used as a safe reference for `undefined` in pre-ES5 environments. */ + var undefined; + + /** Used as the semantic version number. */ + var VERSION = '4.17.10'; + + /** Used as the size to enable large array optimizations. */ + var LARGE_ARRAY_SIZE = 200; + + /** Error message constants. */ + var CORE_ERROR_TEXT = 'Unsupported core-js use. Try https://npms.io/search?q=ponyfill.', + FUNC_ERROR_TEXT = 'Expected a function'; + + /** Used to stand-in for `undefined` hash values. */ + var HASH_UNDEFINED = '__lodash_hash_undefined__'; + + /** Used as the maximum memoize cache size. */ + var MAX_MEMOIZE_SIZE = 500; + + /** Used as the internal argument placeholder. */ + var PLACEHOLDER = '__lodash_placeholder__'; + + /** Used to compose bitmasks for cloning. */ + var CLONE_DEEP_FLAG = 1, + CLONE_FLAT_FLAG = 2, + CLONE_SYMBOLS_FLAG = 4; + + /** Used to compose bitmasks for value comparisons. */ + var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + + /** Used to compose bitmasks for function metadata. */ + var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_BOUND_FLAG = 4, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256, + WRAP_FLIP_FLAG = 512; + + /** Used as default options for `_.truncate`. */ + var DEFAULT_TRUNC_LENGTH = 30, + DEFAULT_TRUNC_OMISSION = '...'; + + /** Used to detect hot functions by number of calls within a span of milliseconds. */ + var HOT_COUNT = 800, + HOT_SPAN = 16; + + /** Used to indicate the type of lazy iteratees. */ + var LAZY_FILTER_FLAG = 1, + LAZY_MAP_FLAG = 2, + LAZY_WHILE_FLAG = 3; + + /** Used as references for various `Number` constants. */ + var INFINITY = 1 / 0, + MAX_SAFE_INTEGER = 9007199254740991, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + + /** Used as references for the maximum length and index of an array. 
*/ + var MAX_ARRAY_LENGTH = 4294967295, + MAX_ARRAY_INDEX = MAX_ARRAY_LENGTH - 1, + HALF_MAX_ARRAY_LENGTH = MAX_ARRAY_LENGTH >>> 1; + + /** Used to associate wrap methods with their bit flags. */ + var wrapFlags = [ + ['ary', WRAP_ARY_FLAG], + ['bind', WRAP_BIND_FLAG], + ['bindKey', WRAP_BIND_KEY_FLAG], + ['curry', WRAP_CURRY_FLAG], + ['curryRight', WRAP_CURRY_RIGHT_FLAG], + ['flip', WRAP_FLIP_FLAG], + ['partial', WRAP_PARTIAL_FLAG], + ['partialRight', WRAP_PARTIAL_RIGHT_FLAG], + ['rearg', WRAP_REARG_FLAG] + ]; + + /** `Object#toString` result references. */ + var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + asyncTag = '[object AsyncFunction]', + boolTag = '[object Boolean]', + dateTag = '[object Date]', + domExcTag = '[object DOMException]', + errorTag = '[object Error]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + mapTag = '[object Map]', + numberTag = '[object Number]', + nullTag = '[object Null]', + objectTag = '[object Object]', + promiseTag = '[object Promise]', + proxyTag = '[object Proxy]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + symbolTag = '[object Symbol]', + undefinedTag = '[object Undefined]', + weakMapTag = '[object WeakMap]', + weakSetTag = '[object WeakSet]'; + + var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; + + /** Used to match empty string literals in compiled template source. */ + var reEmptyStringLeading = /\b__p \+= '';/g, + reEmptyStringMiddle = /\b(__p \+=) '' \+/g, + reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g; + + /** Used to match HTML entities and HTML characters. */ + var reEscapedHtml = /&(?:amp|lt|gt|quot|#39);/g, + reUnescapedHtml = /[&<>"']/g, + reHasEscapedHtml = RegExp(reEscapedHtml.source), + reHasUnescapedHtml = RegExp(reUnescapedHtml.source); + + /** Used to match template delimiters. */ + var reEscape = /<%-([\s\S]+?)%>/g, + reEvaluate = /<%([\s\S]+?)%>/g, + reInterpolate = /<%=([\s\S]+?)%>/g; + + /** Used to match property names within property paths. */ + var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, + reIsPlainProp = /^\w*$/, + rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; + + /** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ + var reRegExpChar = /[\\^$.*+?()[\]{}|]/g, + reHasRegExpChar = RegExp(reRegExpChar.source); + + /** Used to match leading and trailing whitespace. */ + var reTrim = /^\s+|\s+$/g, + reTrimStart = /^\s+/, + reTrimEnd = /\s+$/; + + /** Used to match wrap detail comments. */ + var reWrapComment = /\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/, + reWrapDetails = /\{\n\/\* \[wrapped with (.+)\] \*/, + reSplitDetails = /,? & /; + + /** Used to match words composed of alphanumeric characters. */ + var reAsciiWord = /[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g; + + /** Used to match backslashes in property paths. */ + var reEscapeChar = /\\(\\)?/g; + + /** + * Used to match + * [ES template delimiters](http://ecma-international.org/ecma-262/7.0/#sec-template-literal-lexical-components). 
+ */ + var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g; + + /** Used to match `RegExp` flags from their coerced string values. */ + var reFlags = /\w*$/; + + /** Used to detect bad signed hexadecimal string values. */ + var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; + + /** Used to detect binary string values. */ + var reIsBinary = /^0b[01]+$/i; + + /** Used to detect host constructors (Safari). */ + var reIsHostCtor = /^\[object .+?Constructor\]$/; + + /** Used to detect octal string values. */ + var reIsOctal = /^0o[0-7]+$/i; + + /** Used to detect unsigned integer values. */ + var reIsUint = /^(?:0|[1-9]\d*)$/; + + /** Used to match Latin Unicode letters (excluding mathematical operators). */ + var reLatin = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g; + + /** Used to ensure capturing order of template delimiters. */ + var reNoMatch = /($^)/; + + /** Used to match unescaped characters in compiled string literals. */ + var reUnescapedString = /['\n\r\u2028\u2029\\]/g; + + /** Used to compose unicode character classes. */ + var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsDingbatRange = '\\u2700-\\u27bf', + rsLowerRange = 'a-z\\xdf-\\xf6\\xf8-\\xff', + rsMathOpRange = '\\xac\\xb1\\xd7\\xf7', + rsNonCharRange = '\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf', + rsPunctuationRange = '\\u2000-\\u206f', + rsSpaceRange = ' \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000', + rsUpperRange = 'A-Z\\xc0-\\xd6\\xd8-\\xde', + rsVarRange = '\\ufe0e\\ufe0f', + rsBreakRange = rsMathOpRange + rsNonCharRange + rsPunctuationRange + rsSpaceRange; + + /** Used to compose unicode capture groups. */ + var rsApos = "['\u2019]", + rsAstral = '[' + rsAstralRange + ']', + rsBreak = '[' + rsBreakRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsDigits = '\\d+', + rsDingbat = '[' + rsDingbatRange + ']', + rsLower = '[' + rsLowerRange + ']', + rsMisc = '[^' + rsAstralRange + rsBreakRange + rsDigits + rsDingbatRange + rsLowerRange + rsUpperRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsUpper = '[' + rsUpperRange + ']', + rsZWJ = '\\u200d'; + + /** Used to compose unicode regexes. */ + var rsMiscLower = '(?:' + rsLower + '|' + rsMisc + ')', + rsMiscUpper = '(?:' + rsUpper + '|' + rsMisc + ')', + rsOptContrLower = '(?:' + rsApos + '(?:d|ll|m|re|s|t|ve))?', + rsOptContrUpper = '(?:' + rsApos + '(?:D|LL|M|RE|S|T|VE))?', + reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsOrdLower = '\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])', + rsOrdUpper = '\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsEmoji = '(?:' + [rsDingbat, rsRegional, rsSurrPair].join('|') + ')' + rsSeq, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; + + /** Used to match apostrophes. 
*/ + var reApos = RegExp(rsApos, 'g'); + + /** + * Used to match [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks) and + * [combining diacritical marks for symbols](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks_for_Symbols). + */ + var reComboMark = RegExp(rsCombo, 'g'); + + /** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ + var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); + + /** Used to match complex or compound words. */ + var reUnicodeWord = RegExp([ + rsUpper + '?' + rsLower + '+' + rsOptContrLower + '(?=' + [rsBreak, rsUpper, '$'].join('|') + ')', + rsMiscUpper + '+' + rsOptContrUpper + '(?=' + [rsBreak, rsUpper + rsMiscLower, '$'].join('|') + ')', + rsUpper + '?' + rsMiscLower + '+' + rsOptContrLower, + rsUpper + '+' + rsOptContrUpper, + rsOrdUpper, + rsOrdLower, + rsDigits, + rsEmoji + ].join('|'), 'g'); + + /** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ + var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); + + /** Used to detect strings that need a more robust regexp to match words. */ + var reHasUnicodeWord = /[a-z][A-Z]|[A-Z]{2,}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/; + + /** Used to assign default `context` object properties. */ + var contextProps = [ + 'Array', 'Buffer', 'DataView', 'Date', 'Error', 'Float32Array', 'Float64Array', + 'Function', 'Int8Array', 'Int16Array', 'Int32Array', 'Map', 'Math', 'Object', + 'Promise', 'RegExp', 'Set', 'String', 'Symbol', 'TypeError', 'Uint8Array', + 'Uint8ClampedArray', 'Uint16Array', 'Uint32Array', 'WeakMap', + '_', 'clearTimeout', 'isFinite', 'parseInt', 'setTimeout' + ]; + + /** Used to make template sourceURLs easier to identify. */ + var templateCounter = -1; + + /** Used to identify `toStringTag` values of typed arrays. */ + var typedArrayTags = {}; + typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = + typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = + typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = + typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = + typedArrayTags[uint32Tag] = true; + typedArrayTags[argsTag] = typedArrayTags[arrayTag] = + typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = + typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = + typedArrayTags[errorTag] = typedArrayTags[funcTag] = + typedArrayTags[mapTag] = typedArrayTags[numberTag] = + typedArrayTags[objectTag] = typedArrayTags[regexpTag] = + typedArrayTags[setTag] = typedArrayTags[stringTag] = + typedArrayTags[weakMapTag] = false; + + /** Used to identify `toStringTag` values supported by `_.clone`. 
*/ + var cloneableTags = {}; + cloneableTags[argsTag] = cloneableTags[arrayTag] = + cloneableTags[arrayBufferTag] = cloneableTags[dataViewTag] = + cloneableTags[boolTag] = cloneableTags[dateTag] = + cloneableTags[float32Tag] = cloneableTags[float64Tag] = + cloneableTags[int8Tag] = cloneableTags[int16Tag] = + cloneableTags[int32Tag] = cloneableTags[mapTag] = + cloneableTags[numberTag] = cloneableTags[objectTag] = + cloneableTags[regexpTag] = cloneableTags[setTag] = + cloneableTags[stringTag] = cloneableTags[symbolTag] = + cloneableTags[uint8Tag] = cloneableTags[uint8ClampedTag] = + cloneableTags[uint16Tag] = cloneableTags[uint32Tag] = true; + cloneableTags[errorTag] = cloneableTags[funcTag] = + cloneableTags[weakMapTag] = false; + + /** Used to map Latin Unicode letters to basic Latin letters. */ + var deburredLetters = { + // Latin-1 Supplement block. + '\xc0': 'A', '\xc1': 'A', '\xc2': 'A', '\xc3': 'A', '\xc4': 'A', '\xc5': 'A', + '\xe0': 'a', '\xe1': 'a', '\xe2': 'a', '\xe3': 'a', '\xe4': 'a', '\xe5': 'a', + '\xc7': 'C', '\xe7': 'c', + '\xd0': 'D', '\xf0': 'd', + '\xc8': 'E', '\xc9': 'E', '\xca': 'E', '\xcb': 'E', + '\xe8': 'e', '\xe9': 'e', '\xea': 'e', '\xeb': 'e', + '\xcc': 'I', '\xcd': 'I', '\xce': 'I', '\xcf': 'I', + '\xec': 'i', '\xed': 'i', '\xee': 'i', '\xef': 'i', + '\xd1': 'N', '\xf1': 'n', + '\xd2': 'O', '\xd3': 'O', '\xd4': 'O', '\xd5': 'O', '\xd6': 'O', '\xd8': 'O', + '\xf2': 'o', '\xf3': 'o', '\xf4': 'o', '\xf5': 'o', '\xf6': 'o', '\xf8': 'o', + '\xd9': 'U', '\xda': 'U', '\xdb': 'U', '\xdc': 'U', + '\xf9': 'u', '\xfa': 'u', '\xfb': 'u', '\xfc': 'u', + '\xdd': 'Y', '\xfd': 'y', '\xff': 'y', + '\xc6': 'Ae', '\xe6': 'ae', + '\xde': 'Th', '\xfe': 'th', + '\xdf': 'ss', + // Latin Extended-A block. + '\u0100': 'A', '\u0102': 'A', '\u0104': 'A', + '\u0101': 'a', '\u0103': 'a', '\u0105': 'a', + '\u0106': 'C', '\u0108': 'C', '\u010a': 'C', '\u010c': 'C', + '\u0107': 'c', '\u0109': 'c', '\u010b': 'c', '\u010d': 'c', + '\u010e': 'D', '\u0110': 'D', '\u010f': 'd', '\u0111': 'd', + '\u0112': 'E', '\u0114': 'E', '\u0116': 'E', '\u0118': 'E', '\u011a': 'E', + '\u0113': 'e', '\u0115': 'e', '\u0117': 'e', '\u0119': 'e', '\u011b': 'e', + '\u011c': 'G', '\u011e': 'G', '\u0120': 'G', '\u0122': 'G', + '\u011d': 'g', '\u011f': 'g', '\u0121': 'g', '\u0123': 'g', + '\u0124': 'H', '\u0126': 'H', '\u0125': 'h', '\u0127': 'h', + '\u0128': 'I', '\u012a': 'I', '\u012c': 'I', '\u012e': 'I', '\u0130': 'I', + '\u0129': 'i', '\u012b': 'i', '\u012d': 'i', '\u012f': 'i', '\u0131': 'i', + '\u0134': 'J', '\u0135': 'j', + '\u0136': 'K', '\u0137': 'k', '\u0138': 'k', + '\u0139': 'L', '\u013b': 'L', '\u013d': 'L', '\u013f': 'L', '\u0141': 'L', + '\u013a': 'l', '\u013c': 'l', '\u013e': 'l', '\u0140': 'l', '\u0142': 'l', + '\u0143': 'N', '\u0145': 'N', '\u0147': 'N', '\u014a': 'N', + '\u0144': 'n', '\u0146': 'n', '\u0148': 'n', '\u014b': 'n', + '\u014c': 'O', '\u014e': 'O', '\u0150': 'O', + '\u014d': 'o', '\u014f': 'o', '\u0151': 'o', + '\u0154': 'R', '\u0156': 'R', '\u0158': 'R', + '\u0155': 'r', '\u0157': 'r', '\u0159': 'r', + '\u015a': 'S', '\u015c': 'S', '\u015e': 'S', '\u0160': 'S', + '\u015b': 's', '\u015d': 's', '\u015f': 's', '\u0161': 's', + '\u0162': 'T', '\u0164': 'T', '\u0166': 'T', + '\u0163': 't', '\u0165': 't', '\u0167': 't', + '\u0168': 'U', '\u016a': 'U', '\u016c': 'U', '\u016e': 'U', '\u0170': 'U', '\u0172': 'U', + '\u0169': 'u', '\u016b': 'u', '\u016d': 'u', '\u016f': 'u', '\u0171': 'u', '\u0173': 'u', + '\u0174': 'W', '\u0175': 'w', + '\u0176': 'Y', '\u0177': 'y', '\u0178': 'Y', + '\u0179': 'Z', 
'\u017b': 'Z', '\u017d': 'Z', + '\u017a': 'z', '\u017c': 'z', '\u017e': 'z', + '\u0132': 'IJ', '\u0133': 'ij', + '\u0152': 'Oe', '\u0153': 'oe', + '\u0149': "'n", '\u017f': 's' + }; + + /** Used to map characters to HTML entities. */ + var htmlEscapes = { + '&': '&amp;', + '<': '&lt;', + '>': '&gt;', + '"': '&quot;', + "'": '&#39;' + }; + + /** Used to map HTML entities to characters. */ + var htmlUnescapes = { + '&amp;': '&', + '&lt;': '<', + '&gt;': '>', + '&quot;': '"', + '&#39;': "'" + }; + + /** Used to escape characters for inclusion in compiled string literals. */ + var stringEscapes = { + '\\': '\\', + "'": "'", + '\n': 'n', + '\r': 'r', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; + + /** Built-in method references without a dependency on `root`. */ + var freeParseFloat = parseFloat, + freeParseInt = parseInt; + + /** Detect free variable `global` from Node.js. */ + var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + + /** Detect free variable `self`. */ + var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + + /** Used as a reference to the global object. */ + var root = freeGlobal || freeSelf || Function('return this')(); + + /** Detect free variable `exports`. */ + var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; + + /** Detect free variable `module`. */ + var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; + + /** Detect the popular CommonJS extension `module.exports`. */ + var moduleExports = freeModule && freeModule.exports === freeExports; + + /** Detect free variable `process` from Node.js. */ + var freeProcess = moduleExports && freeGlobal.process; + + /** Used to access faster Node.js helpers. */ + var nodeUtil = (function() { + try { + // Use `util.types` for Node.js 10+. + var types = freeModule && freeModule.require && freeModule.require('util').types; + + if (types) { + return types; + } + + // Legacy `process.binding('util')` for Node.js < 10. + return freeProcess && freeProcess.binding && freeProcess.binding('util'); + } catch (e) {} + }()); + + /* Node.js helper references. */ + var nodeIsArrayBuffer = nodeUtil && nodeUtil.isArrayBuffer, + nodeIsDate = nodeUtil && nodeUtil.isDate, + nodeIsMap = nodeUtil && nodeUtil.isMap, + nodeIsRegExp = nodeUtil && nodeUtil.isRegExp, + nodeIsSet = nodeUtil && nodeUtil.isSet, + nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; + + /*--------------------------------------------------------------------------*/ + + /** + * A faster alternative to `Function#apply`, this function invokes `func` + * with the `this` binding of `thisArg` and the arguments of `args`. + * + * @private + * @param {Function} func The function to invoke. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} args The arguments to invoke `func` with. + * @returns {*} Returns the result of `func`. + */ + function apply(func, thisArg, args) { + switch (args.length) { + case 0: return func.call(thisArg); + case 1: return func.call(thisArg, args[0]); + case 2: return func.call(thisArg, args[0], args[1]); + case 3: return func.call(thisArg, args[0], args[1], args[2]); + } + return func.apply(thisArg, args); + } + + /** + * A specialized version of `baseAggregator` for arrays. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys.
+ * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. + */ + function arrayAggregator(array, setter, iteratee, accumulator) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + var value = array[index]; + setter(accumulator, value, iteratee(value), array); + } + return accumulator; + } + + /** + * A specialized version of `_.forEach` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEach(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (iteratee(array[index], index, array) === false) { + break; + } + } + return array; + } + + /** + * A specialized version of `_.forEachRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEachRight(array, iteratee) { + var length = array == null ? 0 : array.length; + + while (length--) { + if (iteratee(array[length], length, array) === false) { + break; + } + } + return array; + } + + /** + * A specialized version of `_.every` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + */ + function arrayEvery(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (!predicate(array[index], index, array)) { + return false; + } + } + return true; + } + + /** + * A specialized version of `_.filter` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function arrayFilter(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result[resIndex++] = value; + } + } + return result; + } + + /** + * A specialized version of `_.includes` for arrays without support for + * specifying an index to search from. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludes(array, value) { + var length = array == null ? 0 : array.length; + return !!length && baseIndexOf(array, value, 0) > -1; + } + + /** + * This function is like `arrayIncludes` except that it accepts a comparator. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @param {Function} comparator The comparator invoked per element. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludesWith(array, value, comparator) { + var index = -1, + length = array == null ? 
0 : array.length; + + while (++index < length) { + if (comparator(value, array[index])) { + return true; + } + } + return false; + } + + /** + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function arrayMap(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length, + result = Array(length); + + while (++index < length) { + result[index] = iteratee(array[index], index, array); + } + return result; + } + + /** + * Appends the elements of `values` to `array`. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to append. + * @returns {Array} Returns `array`. + */ + function arrayPush(array, values) { + var index = -1, + length = values.length, + offset = array.length; + + while (++index < length) { + array[offset + index] = values[index]; + } + return array; + } + + /** + * A specialized version of `_.reduce` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the first element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduce(array, iteratee, accumulator, initAccum) { + var index = -1, + length = array == null ? 0 : array.length; + + if (initAccum && length) { + accumulator = array[++index]; + } + while (++index < length) { + accumulator = iteratee(accumulator, array[index], index, array); + } + return accumulator; + } + + /** + * A specialized version of `_.reduceRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the last element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduceRight(array, iteratee, accumulator, initAccum) { + var length = array == null ? 0 : array.length; + if (initAccum && length) { + accumulator = array[--length]; + } + while (length--) { + accumulator = iteratee(accumulator, array[length], length, array); + } + return accumulator; + } + + /** + * A specialized version of `_.some` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function arraySome(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (predicate(array[index], index, array)) { + return true; + } + } + return false; + } + + /** + * Gets the size of an ASCII `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ + var asciiSize = baseProperty('length'); + + /** + * Converts an ASCII `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. 
+ */ + function asciiToArray(string) { + return string.split(''); + } + + /** + * Splits an ASCII `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ + function asciiWords(string) { + return string.match(reAsciiWord) || []; + } + + /** + * The base implementation of methods like `_.findKey` and `_.findLastKey`, + * without support for iteratee shorthands, which iterates over `collection` + * using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the found element or its key, else `undefined`. + */ + function baseFindKey(collection, predicate, eachFunc) { + var result; + eachFunc(collection, function(value, key, collection) { + if (predicate(value, key, collection)) { + result = key; + return false; + } + }); + return result; + } + + /** + * The base implementation of `_.findIndex` and `_.findLastIndex` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 1 : -1); + + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOf(array, value, fromIndex) { + return value === value + ? strictIndexOf(array, value, fromIndex) + : baseFindIndex(array, baseIsNaN, fromIndex); + } + + /** + * This function is like `baseIndexOf` except that it accepts a comparator. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @param {Function} comparator The comparator invoked per element. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOfWith(array, value, fromIndex, comparator) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (comparator(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + */ + function baseIsNaN(value) { + return value !== value; + } + + /** + * The base implementation of `_.mean` and `_.meanBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the mean. 
+ */ + function baseMean(array, iteratee) { + var length = array == null ? 0 : array.length; + return length ? (baseSum(array, iteratee) / length) : NAN; + } + + /** + * The base implementation of `_.property` without support for deep paths. + * + * @private + * @param {string} key The key of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function baseProperty(key) { + return function(object) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.propertyOf` without support for deep paths. + * + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.reduce` and `_.reduceRight`, without support + * for iteratee shorthands, which iterates over `collection` using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} accumulator The initial value. + * @param {boolean} initAccum Specify using the first or last element of + * `collection` as the initial value. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the accumulated value. + */ + function baseReduce(collection, iteratee, accumulator, initAccum, eachFunc) { + eachFunc(collection, function(value, index, collection) { + accumulator = initAccum + ? (initAccum = false, value) + : iteratee(accumulator, value, index, collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.sortBy` which uses `comparer` to define the + * sort order of `array` and replaces criteria objects with their corresponding + * values. + * + * @private + * @param {Array} array The array to sort. + * @param {Function} comparer The function to define sort order. + * @returns {Array} Returns `array`. + */ + function baseSortBy(array, comparer) { + var length = array.length; + + array.sort(comparer); + while (length--) { + array[length] = array[length].value; + } + return array; + } + + /** + * The base implementation of `_.sum` and `_.sumBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the sum. + */ + function baseSum(array, iteratee) { + var result, + index = -1, + length = array.length; + + while (++index < length) { + var current = iteratee(array[index]); + if (current !== undefined) { + result = result === undefined ? current : (result + current); + } + } + return result; + } + + /** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. + */ + function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); + + while (++index < n) { + result[index] = iteratee(index); + } + return result; + } + + /** + * The base implementation of `_.toPairs` and `_.toPairsIn` which creates an array + * of key-value pairs for `object` corresponding to the property names of `props`. + * + * @private + * @param {Object} object The object to query. 
+ * @param {Array} props The property names to get values for. + * @returns {Object} Returns the key-value pairs. + */ + function baseToPairs(object, props) { + return arrayMap(props, function(key) { + return [key, object[key]]; + }); + } + + /** + * The base implementation of `_.unary` without support for storing metadata. + * + * @private + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. + */ + function baseUnary(func) { + return function(value) { + return func(value); + }; + } + + /** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ + function baseValues(object, props) { + return arrayMap(props, function(key) { + return object[key]; + }); + } + + /** + * Checks if a `cache` value for `key` exists. + * + * @private + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function cacheHas(cache, key) { + return cache.has(key); + } + + /** + * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the first unmatched string symbol. + */ + function charsStartIndex(strSymbols, chrSymbols) { + var index = -1, + length = strSymbols.length; + + while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + + /** + * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the last unmatched string symbol. + */ + function charsEndIndex(strSymbols, chrSymbols) { + var index = strSymbols.length; + + while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + + /** + * Gets the number of `placeholder` occurrences in `array`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} placeholder The placeholder to search for. + * @returns {number} Returns the placeholder count. + */ + function countHolders(array, placeholder) { + var length = array.length, + result = 0; + + while (length--) { + if (array[length] === placeholder) { + ++result; + } + } + return result; + } + + /** + * Used by `_.deburr` to convert Latin-1 Supplement and Latin Extended-A + * letters to basic Latin letters. + * + * @private + * @param {string} letter The matched letter to deburr. + * @returns {string} Returns the deburred letter. + */ + var deburrLetter = basePropertyOf(deburredLetters); + + /** + * Used by `_.escape` to convert characters to HTML entities. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. 
+ */ + var escapeHtmlChar = basePropertyOf(htmlEscapes); + + /** + * Used by `_.template` to escape characters for inclusion in compiled string literals. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + function escapeStringChar(chr) { + return '\\' + stringEscapes[chr]; + } + + /** + * Gets the value at `key` of `object`. + * + * @private + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function getValue(object, key) { + return object == null ? undefined : object[key]; + } + + /** + * Checks if `string` contains Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a symbol is found, else `false`. + */ + function hasUnicode(string) { + return reHasUnicode.test(string); + } + + /** + * Checks if `string` contains a word composed of Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a word is found, else `false`. + */ + function hasUnicodeWord(string) { + return reHasUnicodeWord.test(string); + } + + /** + * Converts `iterator` to an array. + * + * @private + * @param {Object} iterator The iterator to convert. + * @returns {Array} Returns the converted array. + */ + function iteratorToArray(iterator) { + var data, + result = []; + + while (!(data = iterator.next()).done) { + result.push(data.value); + } + return result; + } + + /** + * Converts `map` to its key-value pairs. + * + * @private + * @param {Object} map The map to convert. + * @returns {Array} Returns the key-value pairs. + */ + function mapToArray(map) { + var index = -1, + result = Array(map.size); + + map.forEach(function(value, key) { + result[++index] = [key, value]; + }); + return result; + } + + /** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ + function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; + } + + /** + * Replaces all `placeholder` elements in `array` with an internal placeholder + * and returns an array of their indexes. + * + * @private + * @param {Array} array The array to modify. + * @param {*} placeholder The placeholder to replace. + * @returns {Array} Returns the new array of placeholder indexes. + */ + function replaceHolders(array, placeholder) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value === placeholder || value === PLACEHOLDER) { + array[index] = PLACEHOLDER; + result[resIndex++] = index; + } + } + return result; + } + + /** + * Gets the value at `key`, unless `key` is "__proto__". + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function safeGet(object, key) { + return key == '__proto__' + ? undefined + : object[key]; + } + + /** + * Converts `set` to an array of its values. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. 
+ */ + function setToArray(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = value; + }); + return result; + } + + /** + * Converts `set` to its value-value pairs. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the value-value pairs. + */ + function setToPairs(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = [value, value]; + }); + return result; + } + + /** + * A specialized version of `_.indexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictIndexOf(array, value, fromIndex) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; + } + + /** + * A specialized version of `_.lastIndexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictLastIndexOf(array, value, fromIndex) { + var index = fromIndex + 1; + while (index--) { + if (array[index] === value) { + return index; + } + } + return index; + } + + /** + * Gets the number of symbols in `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the string size. + */ + function stringSize(string) { + return hasUnicode(string) + ? unicodeSize(string) + : asciiSize(string); + } + + /** + * Converts `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function stringToArray(string) { + return hasUnicode(string) + ? unicodeToArray(string) + : asciiToArray(string); + } + + /** + * Used by `_.unescape` to convert HTML entities to characters. + * + * @private + * @param {string} chr The matched character to unescape. + * @returns {string} Returns the unescaped character. + */ + var unescapeHtmlChar = basePropertyOf(htmlUnescapes); + + /** + * Gets the size of a Unicode `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ + function unicodeSize(string) { + var result = reUnicode.lastIndex = 0; + while (reUnicode.test(string)) { + ++result; + } + return result; + } + + /** + * Converts a Unicode `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function unicodeToArray(string) { + return string.match(reUnicode) || []; + } + + /** + * Splits a Unicode `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ + function unicodeWords(string) { + return string.match(reUnicodeWord) || []; + } + + /*--------------------------------------------------------------------------*/ + + /** + * Create a new pristine `lodash` function using the `context` object. 
+ * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Util + * @param {Object} [context=root] The context object. + * @returns {Function} Returns a new `lodash` function. + * @example + * + * _.mixin({ 'foo': _.constant('foo') }); + * + * var lodash = _.runInContext(); + * lodash.mixin({ 'bar': lodash.constant('bar') }); + * + * _.isFunction(_.foo); + * // => true + * _.isFunction(_.bar); + * // => false + * + * lodash.isFunction(lodash.foo); + * // => false + * lodash.isFunction(lodash.bar); + * // => true + * + * // Create a suped-up `defer` in Node.js. + * var defer = _.runInContext({ 'setTimeout': setImmediate }).defer; + */ + var runInContext = (function runInContext(context) { + context = context == null ? root : _.defaults(root.Object(), context, _.pick(root, contextProps)); + + /** Built-in constructor references. */ + var Array = context.Array, + Date = context.Date, + Error = context.Error, + Function = context.Function, + Math = context.Math, + Object = context.Object, + RegExp = context.RegExp, + String = context.String, + TypeError = context.TypeError; + + /** Used for built-in method references. */ + var arrayProto = Array.prototype, + funcProto = Function.prototype, + objectProto = Object.prototype; + + /** Used to detect overreaching core-js shims. */ + var coreJsData = context['__core-js_shared__']; + + /** Used to resolve the decompiled source of functions. */ + var funcToString = funcProto.toString; + + /** Used to check objects for own properties. */ + var hasOwnProperty = objectProto.hasOwnProperty; + + /** Used to generate unique IDs. */ + var idCounter = 0; + + /** Used to detect methods masquerading as native. */ + var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; + }()); + + /** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ + var nativeObjectToString = objectProto.toString; + + /** Used to infer the `Object` constructor. */ + var objectCtorString = funcToString.call(Object); + + /** Used to restore the original `_` reference in `_.noConflict`. */ + var oldDash = root._; + + /** Used to detect if a method is native. */ + var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' + ); + + /** Built-in value references. */ + var Buffer = moduleExports ? context.Buffer : undefined, + Symbol = context.Symbol, + Uint8Array = context.Uint8Array, + allocUnsafe = Buffer ? Buffer.allocUnsafe : undefined, + getPrototype = overArg(Object.getPrototypeOf, Object), + objectCreate = Object.create, + propertyIsEnumerable = objectProto.propertyIsEnumerable, + splice = arrayProto.splice, + spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined, + symIterator = Symbol ? Symbol.iterator : undefined, + symToStringTag = Symbol ? Symbol.toStringTag : undefined; + + var defineProperty = (function() { + try { + var func = getNative(Object, 'defineProperty'); + func({}, '', {}); + return func; + } catch (e) {} + }()); + + /** Mocked built-ins. 
*/ + var ctxClearTimeout = context.clearTimeout !== root.clearTimeout && context.clearTimeout, + ctxNow = Date && Date.now !== root.Date.now && Date.now, + ctxSetTimeout = context.setTimeout !== root.setTimeout && context.setTimeout; + + /* Built-in method references for those with the same name as other `lodash` methods. */ + var nativeCeil = Math.ceil, + nativeFloor = Math.floor, + nativeGetSymbols = Object.getOwnPropertySymbols, + nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined, + nativeIsFinite = context.isFinite, + nativeJoin = arrayProto.join, + nativeKeys = overArg(Object.keys, Object), + nativeMax = Math.max, + nativeMin = Math.min, + nativeNow = Date.now, + nativeParseInt = context.parseInt, + nativeRandom = Math.random, + nativeReverse = arrayProto.reverse; + + /* Built-in method references that are verified to be native. */ + var DataView = getNative(context, 'DataView'), + Map = getNative(context, 'Map'), + Promise = getNative(context, 'Promise'), + Set = getNative(context, 'Set'), + WeakMap = getNative(context, 'WeakMap'), + nativeCreate = getNative(Object, 'create'); + + /** Used to store function metadata. */ + var metaMap = WeakMap && new WeakMap; + + /** Used to lookup unminified function names. */ + var realNames = {}; + + /** Used to detect maps, sets, and weakmaps. */ + var dataViewCtorString = toSource(DataView), + mapCtorString = toSource(Map), + promiseCtorString = toSource(Promise), + setCtorString = toSource(Set), + weakMapCtorString = toSource(WeakMap); + + /** Used to convert symbols to primitives and strings. */ + var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolValueOf = symbolProto ? symbolProto.valueOf : undefined, + symbolToString = symbolProto ? symbolProto.toString : undefined; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` object which wraps `value` to enable implicit method + * chain sequences. Methods that operate on and return arrays, collections, + * and functions can be chained together. Methods that retrieve a single value + * or may return a primitive value will automatically end the chain sequence + * and return the unwrapped value. Otherwise, the value must be unwrapped + * with `_#value`. + * + * Explicit chain sequences, which must be unwrapped with `_#value`, may be + * enabled using `_.chain`. + * + * The execution of chained methods is lazy, that is, it's deferred until + * `_#value` is implicitly or explicitly called. + * + * Lazy evaluation allows several methods to support shortcut fusion. + * Shortcut fusion is an optimization to merge iteratee calls; this avoids + * the creation of intermediate arrays and can greatly reduce the number of + * iteratee executions. Sections of a chain sequence qualify for shortcut + * fusion if the section is applied to an array and iteratees accept only + * one argument. The heuristic for whether a section qualifies for shortcut + * fusion is subject to change. + * + * Chaining is supported in custom builds as long as the `_#value` method is + * directly or indirectly included in the build. + * + * In addition to lodash methods, wrappers have `Array` and `String` methods. 
+ * + * The wrapper `Array` methods are: + * `concat`, `join`, `pop`, `push`, `shift`, `sort`, `splice`, and `unshift` + * + * The wrapper `String` methods are: + * `replace` and `split` + * + * The wrapper methods that support shortcut fusion are: + * `at`, `compact`, `drop`, `dropRight`, `dropWhile`, `filter`, `find`, + * `findLast`, `head`, `initial`, `last`, `map`, `reject`, `reverse`, `slice`, + * `tail`, `take`, `takeRight`, `takeRightWhile`, `takeWhile`, and `toArray` + * + * The chainable wrapper methods are: + * `after`, `ary`, `assign`, `assignIn`, `assignInWith`, `assignWith`, `at`, + * `before`, `bind`, `bindAll`, `bindKey`, `castArray`, `chain`, `chunk`, + * `commit`, `compact`, `concat`, `conforms`, `constant`, `countBy`, `create`, + * `curry`, `debounce`, `defaults`, `defaultsDeep`, `defer`, `delay`, + * `difference`, `differenceBy`, `differenceWith`, `drop`, `dropRight`, + * `dropRightWhile`, `dropWhile`, `extend`, `extendWith`, `fill`, `filter`, + * `flatMap`, `flatMapDeep`, `flatMapDepth`, `flatten`, `flattenDeep`, + * `flattenDepth`, `flip`, `flow`, `flowRight`, `fromPairs`, `functions`, + * `functionsIn`, `groupBy`, `initial`, `intersection`, `intersectionBy`, + * `intersectionWith`, `invert`, `invertBy`, `invokeMap`, `iteratee`, `keyBy`, + * `keys`, `keysIn`, `map`, `mapKeys`, `mapValues`, `matches`, `matchesProperty`, + * `memoize`, `merge`, `mergeWith`, `method`, `methodOf`, `mixin`, `negate`, + * `nthArg`, `omit`, `omitBy`, `once`, `orderBy`, `over`, `overArgs`, + * `overEvery`, `overSome`, `partial`, `partialRight`, `partition`, `pick`, + * `pickBy`, `plant`, `property`, `propertyOf`, `pull`, `pullAll`, `pullAllBy`, + * `pullAllWith`, `pullAt`, `push`, `range`, `rangeRight`, `rearg`, `reject`, + * `remove`, `rest`, `reverse`, `sampleSize`, `set`, `setWith`, `shuffle`, + * `slice`, `sort`, `sortBy`, `splice`, `spread`, `tail`, `take`, `takeRight`, + * `takeRightWhile`, `takeWhile`, `tap`, `throttle`, `thru`, `toArray`, + * `toPairs`, `toPairsIn`, `toPath`, `toPlainObject`, `transform`, `unary`, + * `union`, `unionBy`, `unionWith`, `uniq`, `uniqBy`, `uniqWith`, `unset`, + * `unshift`, `unzip`, `unzipWith`, `update`, `updateWith`, `values`, + * `valuesIn`, `without`, `wrap`, `xor`, `xorBy`, `xorWith`, `zip`, + * `zipObject`, `zipObjectDeep`, and `zipWith` + * + * The wrapper methods that are **not** chainable by default are: + * `add`, `attempt`, `camelCase`, `capitalize`, `ceil`, `clamp`, `clone`, + * `cloneDeep`, `cloneDeepWith`, `cloneWith`, `conformsTo`, `deburr`, + * `defaultTo`, `divide`, `each`, `eachRight`, `endsWith`, `eq`, `escape`, + * `escapeRegExp`, `every`, `find`, `findIndex`, `findKey`, `findLast`, + * `findLastIndex`, `findLastKey`, `first`, `floor`, `forEach`, `forEachRight`, + * `forIn`, `forInRight`, `forOwn`, `forOwnRight`, `get`, `gt`, `gte`, `has`, + * `hasIn`, `head`, `identity`, `includes`, `indexOf`, `inRange`, `invoke`, + * `isArguments`, `isArray`, `isArrayBuffer`, `isArrayLike`, `isArrayLikeObject`, + * `isBoolean`, `isBuffer`, `isDate`, `isElement`, `isEmpty`, `isEqual`, + * `isEqualWith`, `isError`, `isFinite`, `isFunction`, `isInteger`, `isLength`, + * `isMap`, `isMatch`, `isMatchWith`, `isNaN`, `isNative`, `isNil`, `isNull`, + * `isNumber`, `isObject`, `isObjectLike`, `isPlainObject`, `isRegExp`, + * `isSafeInteger`, `isSet`, `isString`, `isUndefined`, `isTypedArray`, + * `isWeakMap`, `isWeakSet`, `join`, `kebabCase`, `last`, `lastIndexOf`, + * `lowerCase`, `lowerFirst`, `lt`, `lte`, `max`, `maxBy`, `mean`, `meanBy`, + * `min`, 
`minBy`, `multiply`, `noConflict`, `noop`, `now`, `nth`, `pad`, + * `padEnd`, `padStart`, `parseInt`, `pop`, `random`, `reduce`, `reduceRight`, + * `repeat`, `result`, `round`, `runInContext`, `sample`, `shift`, `size`, + * `snakeCase`, `some`, `sortedIndex`, `sortedIndexBy`, `sortedLastIndex`, + * `sortedLastIndexBy`, `startCase`, `startsWith`, `stubArray`, `stubFalse`, + * `stubObject`, `stubString`, `stubTrue`, `subtract`, `sum`, `sumBy`, + * `template`, `times`, `toFinite`, `toInteger`, `toJSON`, `toLength`, + * `toLower`, `toNumber`, `toSafeInteger`, `toString`, `toUpper`, `trim`, + * `trimEnd`, `trimStart`, `truncate`, `unescape`, `uniqueId`, `upperCase`, + * `upperFirst`, `value`, and `words` + * + * @name _ + * @constructor + * @category Seq + * @param {*} value The value to wrap in a `lodash` instance. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2, 3]); + * + * // Returns an unwrapped value. + * wrapped.reduce(_.add); + * // => 6 + * + * // Returns a wrapped value. + * var squares = wrapped.map(square); + * + * _.isArray(squares); + * // => false + * + * _.isArray(squares.value()); + * // => true + */ + function lodash(value) { + if (isObjectLike(value) && !isArray(value) && !(value instanceof LazyWrapper)) { + if (value instanceof LodashWrapper) { + return value; + } + if (hasOwnProperty.call(value, '__wrapped__')) { + return wrapperClone(value); + } + } + return new LodashWrapper(value); + } + + /** + * The base implementation of `_.create` without support for assigning + * properties to the created object. + * + * @private + * @param {Object} proto The object to inherit from. + * @returns {Object} Returns the new object. + */ + var baseCreate = (function() { + function object() {} + return function(proto) { + if (!isObject(proto)) { + return {}; + } + if (objectCreate) { + return objectCreate(proto); + } + object.prototype = proto; + var result = new object; + object.prototype = undefined; + return result; + }; + }()); + + /** + * The function whose prototype chain sequence wrappers inherit from. + * + * @private + */ + function baseLodash() { + // No operation performed. + } + + /** + * The base constructor for creating `lodash` wrapper objects. + * + * @private + * @param {*} value The value to wrap. + * @param {boolean} [chainAll] Enable explicit method chain sequences. + */ + function LodashWrapper(value, chainAll) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__chain__ = !!chainAll; + this.__index__ = 0; + this.__values__ = undefined; + } + + /** + * By default, the template delimiters used by lodash are like those in + * embedded Ruby (ERB) as well as ES2015 template strings. Change the + * following template settings to use alternative delimiters. + * + * @static + * @memberOf _ + * @type {Object} + */ + lodash.templateSettings = { + + /** + * Used to detect `data` property values to be HTML-escaped. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'escape': reEscape, + + /** + * Used to detect code to be evaluated. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'evaluate': reEvaluate, + + /** + * Used to detect `data` property values to inject. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'interpolate': reInterpolate, + + /** + * Used to reference the data object in the template text. 
+ * + * @memberOf _.templateSettings + * @type {string} + */ + 'variable': '', + + /** + * Used to import variables into the compiled template. + * + * @memberOf _.templateSettings + * @type {Object} + */ + 'imports': { + + /** + * A reference to the `lodash` function. + * + * @memberOf _.templateSettings.imports + * @type {Function} + */ + '_': lodash + } + }; + + // Ensure wrappers are instances of `baseLodash`. + lodash.prototype = baseLodash.prototype; + lodash.prototype.constructor = lodash; + + LodashWrapper.prototype = baseCreate(baseLodash.prototype); + LodashWrapper.prototype.constructor = LodashWrapper; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a lazy wrapper object which wraps `value` to enable lazy evaluation. + * + * @private + * @constructor + * @param {*} value The value to wrap. + */ + function LazyWrapper(value) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__dir__ = 1; + this.__filtered__ = false; + this.__iteratees__ = []; + this.__takeCount__ = MAX_ARRAY_LENGTH; + this.__views__ = []; + } + + /** + * Creates a clone of the lazy wrapper object. + * + * @private + * @name clone + * @memberOf LazyWrapper + * @returns {Object} Returns the cloned `LazyWrapper` object. + */ + function lazyClone() { + var result = new LazyWrapper(this.__wrapped__); + result.__actions__ = copyArray(this.__actions__); + result.__dir__ = this.__dir__; + result.__filtered__ = this.__filtered__; + result.__iteratees__ = copyArray(this.__iteratees__); + result.__takeCount__ = this.__takeCount__; + result.__views__ = copyArray(this.__views__); + return result; + } + + /** + * Reverses the direction of lazy iteration. + * + * @private + * @name reverse + * @memberOf LazyWrapper + * @returns {Object} Returns the new reversed `LazyWrapper` object. + */ + function lazyReverse() { + if (this.__filtered__) { + var result = new LazyWrapper(this); + result.__dir__ = -1; + result.__filtered__ = true; + } else { + result = this.clone(); + result.__dir__ *= -1; + } + return result; + } + + /** + * Extracts the unwrapped value from its lazy wrapper. + * + * @private + * @name value + * @memberOf LazyWrapper + * @returns {*} Returns the unwrapped value. + */ + function lazyValue() { + var array = this.__wrapped__.value(), + dir = this.__dir__, + isArr = isArray(array), + isRight = dir < 0, + arrLength = isArr ? array.length : 0, + view = getView(0, arrLength, this.__views__), + start = view.start, + end = view.end, + length = end - start, + index = isRight ? end : (start - 1), + iteratees = this.__iteratees__, + iterLength = iteratees.length, + resIndex = 0, + takeCount = nativeMin(length, this.__takeCount__); + + if (!isArr || (!isRight && arrLength == length && takeCount == length)) { + return baseWrapperValue(array, this.__actions__); + } + var result = []; + + outer: + while (length-- && resIndex < takeCount) { + index += dir; + + var iterIndex = -1, + value = array[index]; + + while (++iterIndex < iterLength) { + var data = iteratees[iterIndex], + iteratee = data.iteratee, + type = data.type, + computed = iteratee(value); + + if (type == LAZY_MAP_FLAG) { + value = computed; + } else if (!computed) { + if (type == LAZY_FILTER_FLAG) { + continue outer; + } else { + break outer; + } + } + } + result[resIndex++] = value; + } + return result; + } + + // Ensure `LazyWrapper` is an instance of `baseLodash`. 
+ LazyWrapper.prototype = baseCreate(baseLodash.prototype); + LazyWrapper.prototype.constructor = LazyWrapper; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a hash object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Hash(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the hash. + * + * @private + * @name clear + * @memberOf Hash + */ + function hashClear() { + this.__data__ = nativeCreate ? nativeCreate(null) : {}; + this.size = 0; + } + + /** + * Removes `key` and its value from the hash. + * + * @private + * @name delete + * @memberOf Hash + * @param {Object} hash The hash to modify. + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function hashDelete(key) { + var result = this.has(key) && delete this.__data__[key]; + this.size -= result ? 1 : 0; + return result; + } + + /** + * Gets the hash value for `key`. + * + * @private + * @name get + * @memberOf Hash + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function hashGet(key) { + var data = this.__data__; + if (nativeCreate) { + var result = data[key]; + return result === HASH_UNDEFINED ? undefined : result; + } + return hasOwnProperty.call(data, key) ? data[key] : undefined; + } + + /** + * Checks if a hash value for `key` exists. + * + * @private + * @name has + * @memberOf Hash + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function hashHas(key) { + var data = this.__data__; + return nativeCreate ? (data[key] !== undefined) : hasOwnProperty.call(data, key); + } + + /** + * Sets the hash `key` to `value`. + * + * @private + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. + */ + function hashSet(key, value) { + var data = this.__data__; + this.size += this.has(key) ? 0 : 1; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; + } + + // Add methods to `Hash`. + Hash.prototype.clear = hashClear; + Hash.prototype['delete'] = hashDelete; + Hash.prototype.get = hashGet; + Hash.prototype.has = hashHas; + Hash.prototype.set = hashSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an list cache object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function ListCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the list cache. + * + * @private + * @name clear + * @memberOf ListCache + */ + function listCacheClear() { + this.__data__ = []; + this.size = 0; + } + + /** + * Removes `key` and its value from the list cache. + * + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. 
+ */ + function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + --this.size; + return true; + } + + /** + * Gets the list cache value for `key`. + * + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + return index < 0 ? undefined : data[index][1]; + } + + /** + * Checks if a list cache value for `key` exists. + * + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; + } + + /** + * Sets the list cache `key` to `value`. + * + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. + */ + function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + ++this.size; + data.push([key, value]); + } else { + data[index][1] = value; + } + return this; + } + + // Add methods to `ListCache`. + ListCache.prototype.clear = listCacheClear; + ListCache.prototype['delete'] = listCacheDelete; + ListCache.prototype.get = listCacheGet; + ListCache.prototype.has = listCacheHas; + ListCache.prototype.set = listCacheSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a map cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function MapCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the map. + * + * @private + * @name clear + * @memberOf MapCache + */ + function mapCacheClear() { + this.size = 0; + this.__data__ = { + 'hash': new Hash, + 'map': new (Map || ListCache), + 'string': new Hash + }; + } + + /** + * Removes `key` and its value from the map. + * + * @private + * @name delete + * @memberOf MapCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function mapCacheDelete(key) { + var result = getMapData(this, key)['delete'](key); + this.size -= result ? 1 : 0; + return result; + } + + /** + * Gets the map value for `key`. + * + * @private + * @name get + * @memberOf MapCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function mapCacheGet(key) { + return getMapData(this, key).get(key); + } + + /** + * Checks if a map value for `key` exists. + * + * @private + * @name has + * @memberOf MapCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function mapCacheHas(key) { + return getMapData(this, key).has(key); + } + + /** + * Sets the map `key` to `value`. 
+ * + * @private + * @name set + * @memberOf MapCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the map cache instance. + */ + function mapCacheSet(key, value) { + var data = getMapData(this, key), + size = data.size; + + data.set(key, value); + this.size += data.size == size ? 0 : 1; + return this; + } + + // Add methods to `MapCache`. + MapCache.prototype.clear = mapCacheClear; + MapCache.prototype['delete'] = mapCacheDelete; + MapCache.prototype.get = mapCacheGet; + MapCache.prototype.has = mapCacheHas; + MapCache.prototype.set = mapCacheSet; + + /*------------------------------------------------------------------------*/ + + /** + * + * Creates an array cache object to store unique values. + * + * @private + * @constructor + * @param {Array} [values] The values to cache. + */ + function SetCache(values) { + var index = -1, + length = values == null ? 0 : values.length; + + this.__data__ = new MapCache; + while (++index < length) { + this.add(values[index]); + } + } + + /** + * Adds `value` to the array cache. + * + * @private + * @name add + * @memberOf SetCache + * @alias push + * @param {*} value The value to cache. + * @returns {Object} Returns the cache instance. + */ + function setCacheAdd(value) { + this.__data__.set(value, HASH_UNDEFINED); + return this; + } + + /** + * Checks if `value` is in the array cache. + * + * @private + * @name has + * @memberOf SetCache + * @param {*} value The value to search for. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + */ + function setCacheHas(value) { + return this.__data__.has(value); + } + + // Add methods to `SetCache`. + SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; + SetCache.prototype.has = setCacheHas; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a stack cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Stack(entries) { + var data = this.__data__ = new ListCache(entries); + this.size = data.size; + } + + /** + * Removes all key-value entries from the stack. + * + * @private + * @name clear + * @memberOf Stack + */ + function stackClear() { + this.__data__ = new ListCache; + this.size = 0; + } + + /** + * Removes `key` and its value from the stack. + * + * @private + * @name delete + * @memberOf Stack + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function stackDelete(key) { + var data = this.__data__, + result = data['delete'](key); + + this.size = data.size; + return result; + } + + /** + * Gets the stack value for `key`. + * + * @private + * @name get + * @memberOf Stack + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function stackGet(key) { + return this.__data__.get(key); + } + + /** + * Checks if a stack value for `key` exists. + * + * @private + * @name has + * @memberOf Stack + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function stackHas(key) { + return this.__data__.has(key); + } + + /** + * Sets the stack `key` to `value`. + * + * @private + * @name set + * @memberOf Stack + * @param {string} key The key of the value to set. + * @param {*} value The value to set.
+ * @returns {Object} Returns the stack cache instance. + */ + function stackSet(key, value) { + var data = this.__data__; + if (data instanceof ListCache) { + var pairs = data.__data__; + if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) { + pairs.push([key, value]); + this.size = ++data.size; + return this; + } + data = this.__data__ = new MapCache(pairs); + } + data.set(key, value); + this.size = data.size; + return this; + } + + // Add methods to `Stack`. + Stack.prototype.clear = stackClear; + Stack.prototype['delete'] = stackDelete; + Stack.prototype.get = stackGet; + Stack.prototype.has = stackHas; + Stack.prototype.set = stackSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of the enumerable property names of the array-like `value`. + * + * @private + * @param {*} value The value to query. + * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. + */ + function arrayLikeKeys(value, inherited) { + var isArr = isArray(value), + isArg = !isArr && isArguments(value), + isBuff = !isArr && !isArg && isBuffer(value), + isType = !isArr && !isArg && !isBuff && isTypedArray(value), + skipIndexes = isArr || isArg || isBuff || isType, + result = skipIndexes ? baseTimes(value.length, String) : [], + length = result.length; + + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && ( + // Safari 9 has enumerable `arguments.length` in strict mode. + key == 'length' || + // Node.js 0.10 has enumerable non-index properties on buffers. + (isBuff && (key == 'offset' || key == 'parent')) || + // PhantomJS 2 has enumerable non-index properties on typed arrays. + (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || + // Skip index properties. + isIndex(key, length) + ))) { + result.push(key); + } + } + return result; + } + + /** + * A specialized version of `_.sample` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @returns {*} Returns the random element. + */ + function arraySample(array) { + var length = array.length; + return length ? array[baseRandom(0, length - 1)] : undefined; + } + + /** + * A specialized version of `_.sampleSize` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function arraySampleSize(array, n) { + return shuffleSelf(copyArray(array), baseClamp(n, 0, array.length)); + } + + /** + * A specialized version of `_.shuffle` for arrays. + * + * @private + * @param {Array} array The array to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function arrayShuffle(array) { + return shuffleSelf(copyArray(array)); + } + + /** + * This function is like `assignValue` except that it doesn't assign + * `undefined` values. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignMergeValue(object, key, value) { + if ((value !== undefined && !eq(object[key], value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * Assigns `value` to `key` of `object` if the existing value is not equivalent + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. 
+ * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignValue(object, key, value) { + var objValue = object[key]; + if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * Gets the index at which the `key` is found in `array` of key-value pairs. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} key The key to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function assocIndexOf(array, key) { + var length = array.length; + while (length--) { + if (eq(array[length][0], key)) { + return length; + } + } + return -1; + } + + /** + * Aggregates elements of `collection` on `accumulator` with keys transformed + * by `iteratee` and values set by `setter`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. + */ + function baseAggregator(collection, setter, iteratee, accumulator) { + baseEach(collection, function(value, key, collection) { + setter(accumulator, value, iteratee(value), collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.assign` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssign(object, source) { + return object && copyObject(source, keys(source), object); + } + + /** + * The base implementation of `_.assignIn` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssignIn(object, source) { + return object && copyObject(source, keysIn(source), object); + } + + /** + * The base implementation of `assignValue` and `assignMergeValue` without + * value checks. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function baseAssignValue(object, key, value) { + if (key == '__proto__' && defineProperty) { + defineProperty(object, key, { + 'configurable': true, + 'enumerable': true, + 'value': value, + 'writable': true + }); + } else { + object[key] = value; + } + } + + /** + * The base implementation of `_.at` without support for individual paths. + * + * @private + * @param {Object} object The object to iterate over. + * @param {string[]} paths The property paths to pick. + * @returns {Array} Returns the picked elements. + */ + function baseAt(object, paths) { + var index = -1, + length = paths.length, + result = Array(length), + skip = object == null; + + while (++index < length) { + result[index] = skip ? undefined : get(object, paths[index]); + } + return result; + } + + /** + * The base implementation of `_.clamp` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. 
+ * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + */ + function baseClamp(number, lower, upper) { + if (number === number) { + if (upper !== undefined) { + number = number <= upper ? number : upper; + } + if (lower !== undefined) { + number = number >= lower ? number : lower; + } + } + return number; + } + + /** + * The base implementation of `_.clone` and `_.cloneDeep` which tracks + * traversed objects. + * + * @private + * @param {*} value The value to clone. + * @param {boolean} bitmask The bitmask flags. + * 1 - Deep clone + * 2 - Flatten inherited properties + * 4 - Clone symbols + * @param {Function} [customizer] The function to customize cloning. + * @param {string} [key] The key of `value`. + * @param {Object} [object] The parent object of `value`. + * @param {Object} [stack] Tracks traversed objects and their clone counterparts. + * @returns {*} Returns the cloned value. + */ + function baseClone(value, bitmask, customizer, key, object, stack) { + var result, + isDeep = bitmask & CLONE_DEEP_FLAG, + isFlat = bitmask & CLONE_FLAT_FLAG, + isFull = bitmask & CLONE_SYMBOLS_FLAG; + + if (customizer) { + result = object ? customizer(value, key, object, stack) : customizer(value); + } + if (result !== undefined) { + return result; + } + if (!isObject(value)) { + return value; + } + var isArr = isArray(value); + if (isArr) { + result = initCloneArray(value); + if (!isDeep) { + return copyArray(value, result); + } + } else { + var tag = getTag(value), + isFunc = tag == funcTag || tag == genTag; + + if (isBuffer(value)) { + return cloneBuffer(value, isDeep); + } + if (tag == objectTag || tag == argsTag || (isFunc && !object)) { + result = (isFlat || isFunc) ? {} : initCloneObject(value); + if (!isDeep) { + return isFlat + ? copySymbolsIn(value, baseAssignIn(result, value)) + : copySymbols(value, baseAssign(result, value)); + } + } else { + if (!cloneableTags[tag]) { + return object ? value : {}; + } + result = initCloneByTag(value, tag, isDeep); + } + } + // Check for circular references and return its corresponding clone. + stack || (stack = new Stack); + var stacked = stack.get(value); + if (stacked) { + return stacked; + } + stack.set(value, result); + + if (isSet(value)) { + value.forEach(function(subValue) { + result.add(baseClone(subValue, bitmask, customizer, subValue, value, stack)); + }); + + return result; + } + + if (isMap(value)) { + value.forEach(function(subValue, key) { + result.set(key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + + return result; + } + + var keysFunc = isFull + ? (isFlat ? getAllKeysIn : getAllKeys) + : (isFlat ? keysIn : keys); + + var props = isArr ? undefined : keysFunc(value); + arrayEach(props || value, function(subValue, key) { + if (props) { + key = subValue; + subValue = value[key]; + } + // Recursively populate clone (susceptible to call stack limits). + assignValue(result, key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + return result; + } + + /** + * The base implementation of `_.conforms` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property predicates to conform to. + * @returns {Function} Returns the new spec function. + */ + function baseConforms(source) { + var props = keys(source); + return function(object) { + return baseConformsTo(object, source, props); + }; + } + + /** + * The base implementation of `_.conformsTo` which accepts `props` to check. 
+ * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + */ + function baseConformsTo(object, source, props) { + var length = props.length; + if (object == null) { + return !length; + } + object = Object(object); + while (length--) { + var key = props[length], + predicate = source[key], + value = object[key]; + + if ((value === undefined && !(key in object)) || !predicate(value)) { + return false; + } + } + return true; + } + + /** + * The base implementation of `_.delay` and `_.defer` which accepts `args` + * to provide to `func`. + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {Array} args The arguments to provide to `func`. + * @returns {number|Object} Returns the timer id or timeout object. + */ + function baseDelay(func, wait, args) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return setTimeout(function() { func.apply(undefined, args); }, wait); + } + + /** + * The base implementation of methods like `_.difference` without support + * for excluding multiple arrays or iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Array} values The values to exclude. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + */ + function baseDifference(array, values, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + isCommon = true, + length = array.length, + result = [], + valuesLength = values.length; + + if (!length) { + return result; + } + if (iteratee) { + values = arrayMap(values, baseUnary(iteratee)); + } + if (comparator) { + includes = arrayIncludesWith; + isCommon = false; + } + else if (values.length >= LARGE_ARRAY_SIZE) { + includes = cacheHas; + isCommon = false; + values = new SetCache(values); + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee == null ? value : iteratee(value); + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var valuesIndex = valuesLength; + while (valuesIndex--) { + if (values[valuesIndex] === computed) { + continue outer; + } + } + result.push(value); + } + else if (!includes(values, computed, comparator)) { + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.forEach` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEach = createBaseEach(baseForOwn); + + /** + * The base implementation of `_.forEachRight` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEachRight = createBaseEach(baseForOwnRight, true); + + /** + * The base implementation of `_.every` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false` + */ + function baseEvery(collection, predicate) { + var result = true; + baseEach(collection, function(value, index, collection) { + result = !!predicate(value, index, collection); + return result; + }); + return result; + } + + /** + * The base implementation of methods like `_.max` and `_.min` which accepts a + * `comparator` to determine the extremum value. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The iteratee invoked per iteration. + * @param {Function} comparator The comparator used to compare values. + * @returns {*} Returns the extremum value. + */ + function baseExtremum(array, iteratee, comparator) { + var index = -1, + length = array.length; + + while (++index < length) { + var value = array[index], + current = iteratee(value); + + if (current != null && (computed === undefined + ? (current === current && !isSymbol(current)) + : comparator(current, computed) + )) { + var computed = current, + result = value; + } + } + return result; + } + + /** + * The base implementation of `_.fill` without an iteratee call guard. + * + * @private + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + */ + function baseFill(array, value, start, end) { + var length = array.length; + + start = toInteger(start); + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = (end === undefined || end > length) ? length : toInteger(end); + if (end < 0) { + end += length; + } + end = start > end ? 0 : toLength(end); + while (start < end) { + array[start++] = value; + } + return array; + } + + /** + * The base implementation of `_.filter` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function baseFilter(collection, predicate) { + var result = []; + baseEach(collection, function(value, index, collection) { + if (predicate(value, index, collection)) { + result.push(value); + } + }); + return result; + } + + /** + * The base implementation of `_.flatten` with support for restricting flattening. + * + * @private + * @param {Array} array The array to flatten. + * @param {number} depth The maximum recursion depth. + * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. + * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. + * @param {Array} [result=[]] The initial result value. + * @returns {Array} Returns the new flattened array. + */ + function baseFlatten(array, depth, predicate, isStrict, result) { + var index = -1, + length = array.length; + + predicate || (predicate = isFlattenable); + result || (result = []); + + while (++index < length) { + var value = array[index]; + if (depth > 0 && predicate(value)) { + if (depth > 1) { + // Recursively flatten arrays (susceptible to call stack limits). 
+ baseFlatten(value, depth - 1, predicate, isStrict, result); + } else { + arrayPush(result, value); + } + } else if (!isStrict) { + result[result.length] = value; + } + } + return result; + } + + /** + * The base implementation of `baseForOwn` which iterates over `object` + * properties returned by `keysFunc` and invokes `iteratee` for each property. + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseFor = createBaseFor(); + + /** + * This function is like `baseFor` except that it iterates over properties + * in the opposite order. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseForRight = createBaseFor(true); + + /** + * The base implementation of `_.forOwn` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwn(object, iteratee) { + return object && baseFor(object, iteratee, keys); + } + + /** + * The base implementation of `_.forOwnRight` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwnRight(object, iteratee) { + return object && baseForRight(object, iteratee, keys); + } + + /** + * The base implementation of `_.functions` which creates an array of + * `object` function property names filtered from `props`. + * + * @private + * @param {Object} object The object to inspect. + * @param {Array} props The property names to filter. + * @returns {Array} Returns the function names. + */ + function baseFunctions(object, props) { + return arrayFilter(props, function(key) { + return isFunction(object[key]); + }); + } + + /** + * The base implementation of `_.get` without support for default values. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @returns {*} Returns the resolved value. + */ + function baseGet(object, path) { + path = castPath(path, object); + + var index = 0, + length = path.length; + + while (object != null && index < length) { + object = object[toKey(path[index++])]; + } + return (index && index == length) ? object : undefined; + } + + /** + * The base implementation of `getAllKeys` and `getAllKeysIn` which uses + * `keysFunc` and `symbolsFunc` to get the enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Function} keysFunc The function to get the keys of `object`. + * @param {Function} symbolsFunc The function to get the symbols of `object`. + * @returns {Array} Returns the array of property names and symbols. + */ + function baseGetAllKeys(object, keysFunc, symbolsFunc) { + var result = keysFunc(object); + return isArray(object) ? 
result : arrayPush(result, symbolsFunc(object)); + } + + /** + * The base implementation of `getTag` without fallbacks for buggy environments. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + function baseGetTag(value) { + if (value == null) { + return value === undefined ? undefinedTag : nullTag; + } + return (symToStringTag && symToStringTag in Object(value)) + ? getRawTag(value) + : objectToString(value); + } + + /** + * The base implementation of `_.gt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + */ + function baseGt(value, other) { + return value > other; + } + + /** + * The base implementation of `_.has` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. + * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHas(object, key) { + return object != null && hasOwnProperty.call(object, key); + } + + /** + * The base implementation of `_.hasIn` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. + * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHasIn(object, key) { + return object != null && key in Object(object); + } + + /** + * The base implementation of `_.inRange` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to check. + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. + */ + function baseInRange(number, start, end) { + return number >= nativeMin(start, end) && number < nativeMax(start, end); + } + + /** + * The base implementation of methods like `_.intersection`, without support + * for iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of shared values. + */ + function baseIntersection(arrays, iteratee, comparator) { + var includes = comparator ? arrayIncludesWith : arrayIncludes, + length = arrays[0].length, + othLength = arrays.length, + othIndex = othLength, + caches = Array(othLength), + maxLength = Infinity, + result = []; + + while (othIndex--) { + var array = arrays[othIndex]; + if (othIndex && iteratee) { + array = arrayMap(array, baseUnary(iteratee)); + } + maxLength = nativeMin(array.length, maxLength); + caches[othIndex] = !comparator && (iteratee || (length >= 120 && array.length >= 120)) + ? new SetCache(othIndex && array) + : undefined; + } + array = arrays[0]; + + var index = -1, + seen = caches[0]; + + outer: + while (++index < length && result.length < maxLength) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (!(seen + ? cacheHas(seen, computed) + : includes(result, computed, comparator) + )) { + othIndex = othLength; + while (--othIndex) { + var cache = caches[othIndex]; + if (!(cache + ? 
cacheHas(cache, computed) + : includes(arrays[othIndex], computed, comparator)) + ) { + continue outer; + } + } + if (seen) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.invert` and `_.invertBy` which inverts + * `object` with values transformed by `iteratee` and set by `setter`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform values. + * @param {Object} accumulator The initial inverted object. + * @returns {Function} Returns `accumulator`. + */ + function baseInverter(object, setter, iteratee, accumulator) { + baseForOwn(object, function(value, key, object) { + setter(accumulator, iteratee(value), key, object); + }); + return accumulator; + } + + /** + * The base implementation of `_.invoke` without support for individual + * method arguments. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {Array} args The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + */ + function baseInvoke(object, path, args) { + path = castPath(path, object); + object = parent(object, path); + var func = object == null ? object : object[toKey(last(path))]; + return func == null ? undefined : apply(func, object, args); + } + + /** + * The base implementation of `_.isArguments`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + */ + function baseIsArguments(value) { + return isObjectLike(value) && baseGetTag(value) == argsTag; + } + + /** + * The base implementation of `_.isArrayBuffer` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + */ + function baseIsArrayBuffer(value) { + return isObjectLike(value) && baseGetTag(value) == arrayBufferTag; + } + + /** + * The base implementation of `_.isDate` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + */ + function baseIsDate(value) { + return isObjectLike(value) && baseGetTag(value) == dateTag; + } + + /** + * The base implementation of `_.isEqual` which supports partial comparisons + * and tracks traversed objects. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {boolean} bitmask The bitmask flags. + * 1 - Unordered comparison + * 2 - Partial comparison + * @param {Function} [customizer] The function to customize comparisons. + * @param {Object} [stack] Tracks traversed `value` and `other` objects. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + */ + function baseIsEqual(value, other, bitmask, customizer, stack) { + if (value === other) { + return true; + } + if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) { + return value !== value && other !== other; + } + return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack); + } + + /** + * A specialized version of `baseIsEqual` for arrays and objects which performs + * deep comparisons and tracks traversed objects enabling objects with circular + * references to be compared.
+ * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} [stack] Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) { + var objIsArr = isArray(object), + othIsArr = isArray(other), + objTag = objIsArr ? arrayTag : getTag(object), + othTag = othIsArr ? arrayTag : getTag(other); + + objTag = objTag == argsTag ? objectTag : objTag; + othTag = othTag == argsTag ? objectTag : othTag; + + var objIsObj = objTag == objectTag, + othIsObj = othTag == objectTag, + isSameTag = objTag == othTag; + + if (isSameTag && isBuffer(object)) { + if (!isBuffer(other)) { + return false; + } + objIsArr = true; + objIsObj = false; + } + if (isSameTag && !objIsObj) { + stack || (stack = new Stack); + return (objIsArr || isTypedArray(object)) + ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) + : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); + } + if (!(bitmask & COMPARE_PARTIAL_FLAG)) { + var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), + othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); + + if (objIsWrapped || othIsWrapped) { + var objUnwrapped = objIsWrapped ? object.value() : object, + othUnwrapped = othIsWrapped ? other.value() : other; + + stack || (stack = new Stack); + return equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); + } + } + if (!isSameTag) { + return false; + } + stack || (stack = new Stack); + return equalObjects(object, other, bitmask, customizer, equalFunc, stack); + } + + /** + * The base implementation of `_.isMap` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + */ + function baseIsMap(value) { + return isObjectLike(value) && getTag(value) == mapTag; + } + + /** + * The base implementation of `_.isMatch` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Array} matchData The property names, values, and compare flags to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + */ + function baseIsMatch(object, source, matchData, customizer) { + var index = matchData.length, + length = index, + noCustomizer = !customizer; + + if (object == null) { + return !length; + } + object = Object(object); + while (index--) { + var data = matchData[index]; + if ((noCustomizer && data[2]) + ? data[1] !== object[data[0]] + : !(data[0] in object) + ) { + return false; + } + } + while (++index < length) { + data = matchData[index]; + var key = data[0], + objValue = object[key], + srcValue = data[1]; + + if (noCustomizer && data[2]) { + if (objValue === undefined && !(key in object)) { + return false; + } + } else { + var stack = new Stack; + if (customizer) { + var result = customizer(objValue, srcValue, key, object, source, stack); + } + if (!(result === undefined + ? 
baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG, customizer, stack) + : result + )) { + return false; + } + } + } + return true; + } + + /** + * The base implementation of `_.isNative` without bad shim checks. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + */ + function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; + } + var pattern = isFunction(value) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); + } + + /** + * The base implementation of `_.isRegExp` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + */ + function baseIsRegExp(value) { + return isObjectLike(value) && baseGetTag(value) == regexpTag; + } + + /** + * The base implementation of `_.isSet` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + */ + function baseIsSet(value) { + return isObjectLike(value) && getTag(value) == setTag; + } + + /** + * The base implementation of `_.isTypedArray` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + */ + function baseIsTypedArray(value) { + return isObjectLike(value) && + isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; + } + + /** + * The base implementation of `_.iteratee`. + * + * @private + * @param {*} [value=_.identity] The value to convert to an iteratee. + * @returns {Function} Returns the iteratee. + */ + function baseIteratee(value) { + // Don't store the `typeof` result in a variable to avoid a JIT bug in Safari 9. + // See https://bugs.webkit.org/show_bug.cgi?id=156034 for more details. + if (typeof value == 'function') { + return value; + } + if (value == null) { + return identity; + } + if (typeof value == 'object') { + return isArray(value) + ? baseMatchesProperty(value[0], value[1]) + : baseMatches(value); + } + return property(value); + } + + /** + * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function baseKeys(object) { + if (!isPrototype(object)) { + return nativeKeys(object); + } + var result = []; + for (var key in Object(object)) { + if (hasOwnProperty.call(object, key) && key != 'constructor') { + result.push(key); + } + } + return result; + } + + /** + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); + } + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); + } + } + return result; + } + + /** + * The base implementation of `_.lt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. 
+ */ + function baseLt(value, other) { + return value < other; + } + + /** + * The base implementation of `_.map` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function baseMap(collection, iteratee) { + var index = -1, + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value, key, collection) { + result[++index] = iteratee(value, key, collection); + }); + return result; + } + + /** + * The base implementation of `_.matches` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property values to match. + * @returns {Function} Returns the new spec function. + */ + function baseMatches(source) { + var matchData = getMatchData(source); + if (matchData.length == 1 && matchData[0][2]) { + return matchesStrictComparable(matchData[0][0], matchData[0][1]); + } + return function(object) { + return object === source || baseIsMatch(object, source, matchData); + }; + } + + /** + * The base implementation of `_.matchesProperty` which doesn't clone `srcValue`. + * + * @private + * @param {string} path The path of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function baseMatchesProperty(path, srcValue) { + if (isKey(path) && isStrictComparable(srcValue)) { + return matchesStrictComparable(toKey(path), srcValue); + } + return function(object) { + var objValue = get(object, path); + return (objValue === undefined && objValue === srcValue) + ? hasIn(object, path) + : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG); + }; + } + + /** + * The base implementation of `_.merge` without support for multiple sources. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {number} srcIndex The index of `source`. + * @param {Function} [customizer] The function to customize merged values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ + function baseMerge(object, source, srcIndex, customizer, stack) { + if (object === source) { + return; + } + baseFor(source, function(srcValue, key) { + if (isObject(srcValue)) { + stack || (stack = new Stack); + baseMergeDeep(object, source, key, srcIndex, baseMerge, customizer, stack); + } + else { + var newValue = customizer + ? customizer(safeGet(object, key), srcValue, (key + ''), object, source, stack) + : undefined; + + if (newValue === undefined) { + newValue = srcValue; + } + assignMergeValue(object, key, newValue); + } + }, keysIn); + } + + /** + * A specialized version of `baseMerge` for arrays and objects which performs + * deep merges and tracks traversed objects enabling objects with circular + * references to be merged. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {string} key The key of the value to merge. + * @param {number} srcIndex The index of `source`. + * @param {Function} mergeFunc The function to merge values. + * @param {Function} [customizer] The function to customize assigned values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. 
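+ *
+ * For example, when merging source `{ 'a': { 'b': 1 } }` into `{ 'a': { 'c': 2 } }`,
+ * this function is invoked for key `'a'` and deep merges the nested objects into
+ * `{ 'a': { 'b': 1, 'c': 2 } }`, while `stack` lets circular source references
+ * resolve to their already-merged counterparts.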
+ */ + function baseMergeDeep(object, source, key, srcIndex, mergeFunc, customizer, stack) { + var objValue = safeGet(object, key), + srcValue = safeGet(source, key), + stacked = stack.get(srcValue); + + if (stacked) { + assignMergeValue(object, key, stacked); + return; + } + var newValue = customizer + ? customizer(objValue, srcValue, (key + ''), object, source, stack) + : undefined; + + var isCommon = newValue === undefined; + + if (isCommon) { + var isArr = isArray(srcValue), + isBuff = !isArr && isBuffer(srcValue), + isTyped = !isArr && !isBuff && isTypedArray(srcValue); + + newValue = srcValue; + if (isArr || isBuff || isTyped) { + if (isArray(objValue)) { + newValue = objValue; + } + else if (isArrayLikeObject(objValue)) { + newValue = copyArray(objValue); + } + else if (isBuff) { + isCommon = false; + newValue = cloneBuffer(srcValue, true); + } + else if (isTyped) { + isCommon = false; + newValue = cloneTypedArray(srcValue, true); + } + else { + newValue = []; + } + } + else if (isPlainObject(srcValue) || isArguments(srcValue)) { + newValue = objValue; + if (isArguments(objValue)) { + newValue = toPlainObject(objValue); + } + else if (!isObject(objValue) || (srcIndex && isFunction(objValue))) { + newValue = initCloneObject(srcValue); + } + } + else { + isCommon = false; + } + } + if (isCommon) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, newValue); + mergeFunc(newValue, srcValue, srcIndex, customizer, stack); + stack['delete'](srcValue); + } + assignMergeValue(object, key, newValue); + } + + /** + * The base implementation of `_.nth` which doesn't coerce arguments. + * + * @private + * @param {Array} array The array to query. + * @param {number} n The index of the element to return. + * @returns {*} Returns the nth element of `array`. + */ + function baseNth(array, n) { + var length = array.length; + if (!length) { + return; + } + n += n < 0 ? length : 0; + return isIndex(n, length) ? array[n] : undefined; + } + + /** + * The base implementation of `_.orderBy` without param guards. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function[]|Object[]|string[]} iteratees The iteratees to sort by. + * @param {string[]} orders The sort orders of `iteratees`. + * @returns {Array} Returns the new sorted array. + */ + function baseOrderBy(collection, iteratees, orders) { + var index = -1; + iteratees = arrayMap(iteratees.length ? iteratees : [identity], baseUnary(getIteratee())); + + var result = baseMap(collection, function(value, key, collection) { + var criteria = arrayMap(iteratees, function(iteratee) { + return iteratee(value); + }); + return { 'criteria': criteria, 'index': ++index, 'value': value }; + }); + + return baseSortBy(result, function(object, other) { + return compareMultiple(object, other, orders); + }); + } + + /** + * The base implementation of `_.pick` without support for individual + * property identifiers. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @returns {Object} Returns the new object. + */ + function basePick(object, paths) { + return basePickBy(object, paths, function(value, path) { + return hasIn(object, path); + }); + } + + /** + * The base implementation of `_.pickBy` without support for iteratee shorthands. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. 
+ * @param {Function} predicate The function invoked per property. + * @returns {Object} Returns the new object. + */ + function basePickBy(object, paths, predicate) { + var index = -1, + length = paths.length, + result = {}; + + while (++index < length) { + var path = paths[index], + value = baseGet(object, path); + + if (predicate(value, path)) { + baseSet(result, castPath(path, object), value); + } + } + return result; + } + + /** + * A specialized version of `baseProperty` which supports deep paths. + * + * @private + * @param {Array|string} path The path of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyDeep(path) { + return function(object) { + return baseGet(object, path); + }; + } + + /** + * The base implementation of `_.pullAllBy` without support for iteratee + * shorthands. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + */ + function basePullAll(array, values, iteratee, comparator) { + var indexOf = comparator ? baseIndexOfWith : baseIndexOf, + index = -1, + length = values.length, + seen = array; + + if (array === values) { + values = copyArray(values); + } + if (iteratee) { + seen = arrayMap(array, baseUnary(iteratee)); + } + while (++index < length) { + var fromIndex = 0, + value = values[index], + computed = iteratee ? iteratee(value) : value; + + while ((fromIndex = indexOf(seen, computed, fromIndex, comparator)) > -1) { + if (seen !== array) { + splice.call(seen, fromIndex, 1); + } + splice.call(array, fromIndex, 1); + } + } + return array; + } + + /** + * The base implementation of `_.pullAt` without support for individual + * indexes or capturing the removed elements. + * + * @private + * @param {Array} array The array to modify. + * @param {number[]} indexes The indexes of elements to remove. + * @returns {Array} Returns `array`. + */ + function basePullAt(array, indexes) { + var length = array ? indexes.length : 0, + lastIndex = length - 1; + + while (length--) { + var index = indexes[length]; + if (length == lastIndex || index !== previous) { + var previous = index; + if (isIndex(index)) { + splice.call(array, index, 1); + } else { + baseUnset(array, index); + } + } + } + return array; + } + + /** + * The base implementation of `_.random` without support for returning + * floating-point numbers. + * + * @private + * @param {number} lower The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the random number. + */ + function baseRandom(lower, upper) { + return lower + nativeFloor(nativeRandom() * (upper - lower + 1)); + } + + /** + * The base implementation of `_.range` and `_.rangeRight` which doesn't + * coerce arguments. + * + * @private + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @param {number} step The value to increment or decrement by. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the range of numbers. + */ + function baseRange(start, end, step, fromRight) { + var index = -1, + length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), + result = Array(length); + + while (length--) { + result[fromRight ? 
length : ++index] = start; + start += step; + } + return result; + } + + /** + * The base implementation of `_.repeat` which doesn't coerce arguments. + * + * @private + * @param {string} string The string to repeat. + * @param {number} n The number of times to repeat the string. + * @returns {string} Returns the repeated string. + */ + function baseRepeat(string, n) { + var result = ''; + if (!string || n < 1 || n > MAX_SAFE_INTEGER) { + return result; + } + // Leverage the exponentiation by squaring algorithm for a faster repeat. + // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details. + do { + if (n % 2) { + result += string; + } + n = nativeFloor(n / 2); + if (n) { + string += string; + } + } while (n); + + return result; + } + + /** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ + function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); + } + + /** + * The base implementation of `_.sample`. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + */ + function baseSample(collection) { + return arraySample(values(collection)); + } + + /** + * The base implementation of `_.sampleSize` without param guards. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function baseSampleSize(collection, n) { + var array = values(collection); + return shuffleSelf(array, baseClamp(n, 0, array.length)); + } + + /** + * The base implementation of `_.set`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseSet(object, path, value, customizer) { + if (!isObject(object)) { + return object; + } + path = castPath(path, object); + + var index = -1, + length = path.length, + lastIndex = length - 1, + nested = object; + + while (nested != null && ++index < length) { + var key = toKey(path[index]), + newValue = value; + + if (index != lastIndex) { + var objValue = nested[key]; + newValue = customizer ? customizer(objValue, key, nested) : undefined; + if (newValue === undefined) { + newValue = isObject(objValue) + ? objValue + : (isIndex(path[index + 1]) ? [] : {}); + } + } + assignValue(nested, key, newValue); + nested = nested[key]; + } + return object; + } + + /** + * The base implementation of `setData` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var baseSetData = !metaMap ? identity : function(func, data) { + metaMap.set(func, data); + return func; + }; + + /** + * The base implementation of `setToString` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var baseSetToString = !defineProperty ? 
identity : function(func, string) { + return defineProperty(func, 'toString', { + 'configurable': true, + 'enumerable': false, + 'value': constant(string), + 'writable': true + }); + }; + + /** + * The base implementation of `_.shuffle`. + * + * @private + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function baseShuffle(collection) { + return shuffleSelf(values(collection)); + } + + /** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function baseSlice(array, start, end) { + var index = -1, + length = array.length; + + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 0 : ((end - start) >>> 0); + start >>>= 0; + + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; + } + + /** + * The base implementation of `_.some` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function baseSome(collection, predicate) { + var result; + + baseEach(collection, function(value, index, collection) { + result = predicate(value, index, collection); + return !result; + }); + return !!result; + } + + /** + * The base implementation of `_.sortedIndex` and `_.sortedLastIndex` which + * performs a binary search of `array` to determine the index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ + function baseSortedIndex(array, value, retHighest) { + var low = 0, + high = array == null ? low : array.length; + + if (typeof value == 'number' && value === value && high <= HALF_MAX_ARRAY_LENGTH) { + while (low < high) { + var mid = (low + high) >>> 1, + computed = array[mid]; + + if (computed !== null && !isSymbol(computed) && + (retHighest ? (computed <= value) : (computed < value))) { + low = mid + 1; + } else { + high = mid; + } + } + return high; + } + return baseSortedIndexBy(array, value, identity, retHighest); + } + + /** + * The base implementation of `_.sortedIndexBy` and `_.sortedLastIndexBy` + * which invokes `iteratee` for `value` and each element of `array` to compute + * their sort ranking. The iteratee is invoked with one argument; (value). + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} iteratee The iteratee invoked per element. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. 
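+ *
+ * For example, `baseSortedIndexBy([{ 'x': 10 }, { 'x': 30 }], { 'x': 20 }, function(o) { return o.x; })`
+ * evaluates to `1`, the position that keeps the array sorted by `x`.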
+ */ + function baseSortedIndexBy(array, value, iteratee, retHighest) { + value = iteratee(value); + + var low = 0, + high = array == null ? 0 : array.length, + valIsNaN = value !== value, + valIsNull = value === null, + valIsSymbol = isSymbol(value), + valIsUndefined = value === undefined; + + while (low < high) { + var mid = nativeFloor((low + high) / 2), + computed = iteratee(array[mid]), + othIsDefined = computed !== undefined, + othIsNull = computed === null, + othIsReflexive = computed === computed, + othIsSymbol = isSymbol(computed); + + if (valIsNaN) { + var setLow = retHighest || othIsReflexive; + } else if (valIsUndefined) { + setLow = othIsReflexive && (retHighest || othIsDefined); + } else if (valIsNull) { + setLow = othIsReflexive && othIsDefined && (retHighest || !othIsNull); + } else if (valIsSymbol) { + setLow = othIsReflexive && othIsDefined && !othIsNull && (retHighest || !othIsSymbol); + } else if (othIsNull || othIsSymbol) { + setLow = false; + } else { + setLow = retHighest ? (computed <= value) : (computed < value); + } + if (setLow) { + low = mid + 1; + } else { + high = mid; + } + } + return nativeMin(high, MAX_ARRAY_INDEX); + } + + /** + * The base implementation of `_.sortedUniq` and `_.sortedUniqBy` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ + function baseSortedUniq(array, iteratee) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + if (!index || !eq(computed, seen)) { + var seen = computed; + result[resIndex++] = value === 0 ? 0 : value; + } + } + return result; + } + + /** + * The base implementation of `_.toNumber` which doesn't ensure correct + * conversions of binary, hexadecimal, or octal string values. + * + * @private + * @param {*} value The value to process. + * @returns {number} Returns the number. + */ + function baseToNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + return +value; + } + + /** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ + function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isArray(value)) { + // Recursively convert values (susceptible to call stack limits). + return arrayMap(value, baseToString) + ''; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + } + + /** + * The base implementation of `_.uniqBy` without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. 
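+ *
+ * For example, `baseUniq([2, 1, 2])` yields `[2, 1]`, keeping the first
+ * occurrence of each value; arrays of `LARGE_ARRAY_SIZE` or more elements
+ * switch to a `SetCache` (or a native `Set`) to avoid quadratic scans.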
+ */ + function baseUniq(array, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; + + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.unset`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The property path to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + */ + function baseUnset(object, path) { + path = castPath(path, object); + object = parent(object, path); + return object == null || delete object[toKey(last(path))]; + } + + /** + * The base implementation of `_.update`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to update. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseUpdate(object, path, updater, customizer) { + return baseSet(object, path, updater(baseGet(object, path)), customizer); + } + + /** + * The base implementation of methods like `_.dropWhile` and `_.takeWhile` + * without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to query. + * @param {Function} predicate The function invoked per iteration. + * @param {boolean} [isDrop] Specify dropping elements instead of taking them. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the slice of `array`. + */ + function baseWhile(array, predicate, isDrop, fromRight) { + var length = array.length, + index = fromRight ? length : -1; + + while ((fromRight ? index-- : ++index < length) && + predicate(array[index], index, array)) {} + + return isDrop + ? baseSlice(array, (fromRight ? 0 : index), (fromRight ? index + 1 : length)) + : baseSlice(array, (fromRight ? index + 1 : 0), (fromRight ? length : index)); + } + + /** + * The base implementation of `wrapperValue` which returns the result of + * performing a sequence of actions on the unwrapped `value`, where each + * successive action is supplied the return value of the previous. + * + * @private + * @param {*} value The unwrapped value. + * @param {Array} actions Actions to perform to resolve the unwrapped value. + * @returns {*} Returns the resolved value. 
+ */ + function baseWrapperValue(value, actions) { + var result = value; + if (result instanceof LazyWrapper) { + result = result.value(); + } + return arrayReduce(actions, function(result, action) { + return action.func.apply(action.thisArg, arrayPush([result], action.args)); + }, result); + } + + /** + * The base implementation of methods like `_.xor`, without support for + * iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of values. + */ + function baseXor(arrays, iteratee, comparator) { + var length = arrays.length; + if (length < 2) { + return length ? baseUniq(arrays[0]) : []; + } + var index = -1, + result = Array(length); + + while (++index < length) { + var array = arrays[index], + othIndex = -1; + + while (++othIndex < length) { + if (othIndex != index) { + result[index] = baseDifference(result[index] || array, arrays[othIndex], iteratee, comparator); + } + } + } + return baseUniq(baseFlatten(result, 1), iteratee, comparator); + } + + /** + * This base implementation of `_.zipObject` which assigns values using `assignFunc`. + * + * @private + * @param {Array} props The property identifiers. + * @param {Array} values The property values. + * @param {Function} assignFunc The function to assign values. + * @returns {Object} Returns the new object. + */ + function baseZipObject(props, values, assignFunc) { + var index = -1, + length = props.length, + valsLength = values.length, + result = {}; + + while (++index < length) { + var value = index < valsLength ? values[index] : undefined; + assignFunc(result, props[index], value); + } + return result; + } + + /** + * Casts `value` to an empty array if it's not an array like object. + * + * @private + * @param {*} value The value to inspect. + * @returns {Array|Object} Returns the cast array-like object. + */ + function castArrayLikeObject(value) { + return isArrayLikeObject(value) ? value : []; + } + + /** + * Casts `value` to `identity` if it's not a function. + * + * @private + * @param {*} value The value to inspect. + * @returns {Function} Returns cast function. + */ + function castFunction(value) { + return typeof value == 'function' ? value : identity; + } + + /** + * Casts `value` to a path array if it's not one. + * + * @private + * @param {*} value The value to inspect. + * @param {Object} [object] The object to query keys on. + * @returns {Array} Returns the cast property path array. + */ + function castPath(value, object) { + if (isArray(value)) { + return value; + } + return isKey(value, object) ? [value] : stringToPath(toString(value)); + } + + /** + * A `baseRest` alias which can be replaced with `identity` by module + * replacement plugins. + * + * @private + * @type {Function} + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + var castRest = baseRest; + + /** + * Casts `array` to a slice if it's needed. + * + * @private + * @param {Array} array The array to inspect. + * @param {number} start The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the cast slice. + */ + function castSlice(array, start, end) { + var length = array.length; + end = end === undefined ? length : end; + return (!start && end >= length) ? 
array : baseSlice(array, start, end); + } + + /** + * A simple wrapper around the global [`clearTimeout`](https://mdn.io/clearTimeout). + * + * @private + * @param {number|Object} id The timer id or timeout object of the timer to clear. + */ + var clearTimeout = ctxClearTimeout || function(id) { + return root.clearTimeout(id); + }; + + /** + * Creates a clone of `buffer`. + * + * @private + * @param {Buffer} buffer The buffer to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Buffer} Returns the cloned buffer. + */ + function cloneBuffer(buffer, isDeep) { + if (isDeep) { + return buffer.slice(); + } + var length = buffer.length, + result = allocUnsafe ? allocUnsafe(length) : new buffer.constructor(length); + + buffer.copy(result); + return result; + } + + /** + * Creates a clone of `arrayBuffer`. + * + * @private + * @param {ArrayBuffer} arrayBuffer The array buffer to clone. + * @returns {ArrayBuffer} Returns the cloned array buffer. + */ + function cloneArrayBuffer(arrayBuffer) { + var result = new arrayBuffer.constructor(arrayBuffer.byteLength); + new Uint8Array(result).set(new Uint8Array(arrayBuffer)); + return result; + } + + /** + * Creates a clone of `dataView`. + * + * @private + * @param {Object} dataView The data view to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned data view. + */ + function cloneDataView(dataView, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(dataView.buffer) : dataView.buffer; + return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength); + } + + /** + * Creates a clone of `regexp`. + * + * @private + * @param {Object} regexp The regexp to clone. + * @returns {Object} Returns the cloned regexp. + */ + function cloneRegExp(regexp) { + var result = new regexp.constructor(regexp.source, reFlags.exec(regexp)); + result.lastIndex = regexp.lastIndex; + return result; + } + + /** + * Creates a clone of the `symbol` object. + * + * @private + * @param {Object} symbol The symbol object to clone. + * @returns {Object} Returns the cloned symbol object. + */ + function cloneSymbol(symbol) { + return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {}; + } + + /** + * Creates a clone of `typedArray`. + * + * @private + * @param {Object} typedArray The typed array to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned typed array. + */ + function cloneTypedArray(typedArray, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(typedArray.buffer) : typedArray.buffer; + return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length); + } + + /** + * Compares values to sort them in ascending order. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {number} Returns the sort order indicator for `value`. 
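+ *
+ * For example, `compareAscending(1, 2)` is `-1` and `compareAscending(2, 1)` is `1`,
+ * while `undefined`, `null`, symbols, and `NaN` sort after other values.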
+ */ + function compareAscending(value, other) { + if (value !== other) { + var valIsDefined = value !== undefined, + valIsNull = value === null, + valIsReflexive = value === value, + valIsSymbol = isSymbol(value); + + var othIsDefined = other !== undefined, + othIsNull = other === null, + othIsReflexive = other === other, + othIsSymbol = isSymbol(other); + + if ((!othIsNull && !othIsSymbol && !valIsSymbol && value > other) || + (valIsSymbol && othIsDefined && othIsReflexive && !othIsNull && !othIsSymbol) || + (valIsNull && othIsDefined && othIsReflexive) || + (!valIsDefined && othIsReflexive) || + !valIsReflexive) { + return 1; + } + if ((!valIsNull && !valIsSymbol && !othIsSymbol && value < other) || + (othIsSymbol && valIsDefined && valIsReflexive && !valIsNull && !valIsSymbol) || + (othIsNull && valIsDefined && valIsReflexive) || + (!othIsDefined && valIsReflexive) || + !othIsReflexive) { + return -1; + } + } + return 0; + } + + /** + * Used by `_.orderBy` to compare multiple properties of a value to another + * and stable sort them. + * + * If `orders` is unspecified, all values are sorted in ascending order. Otherwise, + * specify an order of "desc" for descending or "asc" for ascending sort order + * of corresponding values. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {boolean[]|string[]} orders The order to sort by for each property. + * @returns {number} Returns the sort order indicator for `object`. + */ + function compareMultiple(object, other, orders) { + var index = -1, + objCriteria = object.criteria, + othCriteria = other.criteria, + length = objCriteria.length, + ordersLength = orders.length; + + while (++index < length) { + var result = compareAscending(objCriteria[index], othCriteria[index]); + if (result) { + if (index >= ordersLength) { + return result; + } + var order = orders[index]; + return result * (order == 'desc' ? -1 : 1); + } + } + // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications + // that causes it, under certain circumstances, to provide the same value for + // `object` and `other`. See https://github.com/jashkenas/underscore/pull/1247 + // for more details. + // + // This also ensures a stable sort in V8 and other engines. + // See https://bugs.chromium.org/p/v8/issues/detail?id=90 for more details. + return object.index - other.index; + } + + /** + * Creates an array that is the composition of partially applied arguments, + * placeholders, and provided arguments into a single array of arguments. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to prepend to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. 
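+ *
+ * For example, with `partials` of `[1, placeholder]`, `holders` of `[1]`, and
+ * `args` of `[3, 4]`, the composed arguments are `[1, 3, 4]`: the placeholder
+ * slot is filled by the first provided argument and the remaining provided
+ * arguments are appended.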
+ */ + function composeArgs(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersLength = holders.length, + leftIndex = -1, + leftLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(leftLength + rangeLength), + isUncurried = !isCurried; + + while (++leftIndex < leftLength) { + result[leftIndex] = partials[leftIndex]; + } + while (++argsIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[holders[argsIndex]] = args[argsIndex]; + } + } + while (rangeLength--) { + result[leftIndex++] = args[argsIndex++]; + } + return result; + } + + /** + * This function is like `composeArgs` except that the arguments composition + * is tailored for `_.partialRight`. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to append to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. + */ + function composeArgsRight(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersIndex = -1, + holdersLength = holders.length, + rightIndex = -1, + rightLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(rangeLength + rightLength), + isUncurried = !isCurried; + + while (++argsIndex < rangeLength) { + result[argsIndex] = args[argsIndex]; + } + var offset = argsIndex; + while (++rightIndex < rightLength) { + result[offset + rightIndex] = partials[rightIndex]; + } + while (++holdersIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[offset + holders[holdersIndex]] = args[argsIndex++]; + } + } + return result; + } + + /** + * Copies the values of `source` to `array`. + * + * @private + * @param {Array} source The array to copy values from. + * @param {Array} [array=[]] The array to copy values to. + * @returns {Array} Returns `array`. + */ + function copyArray(source, array) { + var index = -1, + length = source.length; + + array || (array = Array(length)); + while (++index < length) { + array[index] = source[index]; + } + return array; + } + + /** + * Copies properties of `source` to `object`. + * + * @private + * @param {Object} source The object to copy properties from. + * @param {Array} props The property identifiers to copy. + * @param {Object} [object={}] The object to copy properties to. + * @param {Function} [customizer] The function to customize copied values. + * @returns {Object} Returns `object`. + */ + function copyObject(source, props, object, customizer) { + var isNew = !object; + object || (object = {}); + + var index = -1, + length = props.length; + + while (++index < length) { + var key = props[index]; + + var newValue = customizer + ? customizer(object[key], source[key], key, object, source) + : undefined; + + if (newValue === undefined) { + newValue = source[key]; + } + if (isNew) { + baseAssignValue(object, key, newValue); + } else { + assignValue(object, key, newValue); + } + } + return object; + } + + /** + * Copies own symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. 
+ */ + function copySymbols(source, object) { + return copyObject(source, getSymbols(source), object); + } + + /** + * Copies own and inherited symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. + */ + function copySymbolsIn(source, object) { + return copyObject(source, getSymbolsIn(source), object); + } + + /** + * Creates a function like `_.groupBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} [initializer] The accumulator object initializer. + * @returns {Function} Returns the new aggregator function. + */ + function createAggregator(setter, initializer) { + return function(collection, iteratee) { + var func = isArray(collection) ? arrayAggregator : baseAggregator, + accumulator = initializer ? initializer() : {}; + + return func(collection, setter, getIteratee(iteratee, 2), accumulator); + }; + } + + /** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. + */ + function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? sources[length - 1] : undefined, + guard = length > 2 ? sources[2] : undefined; + + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + customizer = length < 3 ? undefined : customizer; + length = 1; + } + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); + } + + /** + * Creates a `baseEach` or `baseEachRight` function. + * + * @private + * @param {Function} eachFunc The function to iterate over a collection. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseEach(eachFunc, fromRight) { + return function(collection, iteratee) { + if (collection == null) { + return collection; + } + if (!isArrayLike(collection)) { + return eachFunc(collection, iteratee); + } + var length = collection.length, + index = fromRight ? length : -1, + iterable = Object(collection); + + while ((fromRight ? index-- : ++index < length)) { + if (iteratee(iterable[index], index, iterable) === false) { + break; + } + } + return collection; + }; + } + + /** + * Creates a base function for methods like `_.forIn` and `_.forOwn`. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseFor(fromRight) { + return function(object, iteratee, keysFunc) { + var index = -1, + iterable = Object(object), + props = keysFunc(object), + length = props.length; + + while (length--) { + var key = props[fromRight ? length : ++index]; + if (iteratee(iterable[key], key, iterable) === false) { + break; + } + } + return object; + }; + } + + /** + * Creates a function that wraps `func` to invoke it with the optional `this` + * binding of `thisArg`. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. 
+ * @param {*} [thisArg] The `this` binding of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createBind(func, bitmask, thisArg) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return fn.apply(isBind ? thisArg : this, arguments); + } + return wrapper; + } + + /** + * Creates a function like `_.lowerFirst`. + * + * @private + * @param {string} methodName The name of the `String` case method to use. + * @returns {Function} Returns the new case function. + */ + function createCaseFirst(methodName) { + return function(string) { + string = toString(string); + + var strSymbols = hasUnicode(string) + ? stringToArray(string) + : undefined; + + var chr = strSymbols + ? strSymbols[0] + : string.charAt(0); + + var trailing = strSymbols + ? castSlice(strSymbols, 1).join('') + : string.slice(1); + + return chr[methodName]() + trailing; + }; + } + + /** + * Creates a function like `_.camelCase`. + * + * @private + * @param {Function} callback The function to combine each word. + * @returns {Function} Returns the new compounder function. + */ + function createCompounder(callback) { + return function(string) { + return arrayReduce(words(deburr(string).replace(reApos, '')), callback, ''); + }; + } + + /** + * Creates a function that produces an instance of `Ctor` regardless of + * whether it was invoked as part of a `new` expression or by `call` or `apply`. + * + * @private + * @param {Function} Ctor The constructor to wrap. + * @returns {Function} Returns the new wrapped function. + */ + function createCtor(Ctor) { + return function() { + // Use a `switch` statement to work with class constructors. See + // http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist + // for more details. + var args = arguments; + switch (args.length) { + case 0: return new Ctor; + case 1: return new Ctor(args[0]); + case 2: return new Ctor(args[0], args[1]); + case 3: return new Ctor(args[0], args[1], args[2]); + case 4: return new Ctor(args[0], args[1], args[2], args[3]); + case 5: return new Ctor(args[0], args[1], args[2], args[3], args[4]); + case 6: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5]); + case 7: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5], args[6]); + } + var thisBinding = baseCreate(Ctor.prototype), + result = Ctor.apply(thisBinding, args); + + // Mimic the constructor's `return` behavior. + // See https://es5.github.io/#x13.2.2 for more details. + return isObject(result) ? result : thisBinding; + }; + } + + /** + * Creates a function that wraps `func` to enable currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {number} arity The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createCurry(func, bitmask, arity) { + var Ctor = createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length, + placeholder = getHolder(wrapper); + + while (index--) { + args[index] = arguments[index]; + } + var holders = (length < 3 && args[0] !== placeholder && args[length - 1] !== placeholder) + ? 
[] + : replaceHolders(args, placeholder); + + length -= holders.length; + if (length < arity) { + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, undefined, + args, holders, undefined, undefined, arity - length); + } + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return apply(fn, this, args); + } + return wrapper; + } + + /** + * Creates a `_.find` or `_.findLast` function. + * + * @private + * @param {Function} findIndexFunc The function to find the collection index. + * @returns {Function} Returns the new find function. + */ + function createFind(findIndexFunc) { + return function(collection, predicate, fromIndex) { + var iterable = Object(collection); + if (!isArrayLike(collection)) { + var iteratee = getIteratee(predicate, 3); + collection = keys(collection); + predicate = function(key) { return iteratee(iterable[key], key, iterable); }; + } + var index = findIndexFunc(collection, predicate, fromIndex); + return index > -1 ? iterable[iteratee ? collection[index] : index] : undefined; + }; + } + + /** + * Creates a `_.flow` or `_.flowRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new flow function. + */ + function createFlow(fromRight) { + return flatRest(function(funcs) { + var length = funcs.length, + index = length, + prereq = LodashWrapper.prototype.thru; + + if (fromRight) { + funcs.reverse(); + } + while (index--) { + var func = funcs[index]; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (prereq && !wrapper && getFuncName(func) == 'wrapper') { + var wrapper = new LodashWrapper([], true); + } + } + index = wrapper ? index : length; + while (++index < length) { + func = funcs[index]; + + var funcName = getFuncName(func), + data = funcName == 'wrapper' ? getData(func) : undefined; + + if (data && isLaziable(data[0]) && + data[1] == (WRAP_ARY_FLAG | WRAP_CURRY_FLAG | WRAP_PARTIAL_FLAG | WRAP_REARG_FLAG) && + !data[4].length && data[9] == 1 + ) { + wrapper = wrapper[getFuncName(data[0])].apply(wrapper, data[3]); + } else { + wrapper = (func.length == 1 && isLaziable(func)) + ? wrapper[funcName]() + : wrapper.thru(func); + } + } + return function() { + var args = arguments, + value = args[0]; + + if (wrapper && args.length == 1 && isArray(value)) { + return wrapper.plant(value).value(); + } + var index = 0, + result = length ? funcs[index].apply(this, args) : value; + + while (++index < length) { + result = funcs[index].call(this, result); + } + return result; + }; + }); + } + + /** + * Creates a function that wraps `func` to invoke it with optional `this` + * binding of `thisArg`, partial application, and currying. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [partialsRight] The arguments to append to those provided + * to the new function. + * @param {Array} [holdersRight] The `partialsRight` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. 
+ */ + function createHybrid(func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity) { + var isAry = bitmask & WRAP_ARY_FLAG, + isBind = bitmask & WRAP_BIND_FLAG, + isBindKey = bitmask & WRAP_BIND_KEY_FLAG, + isCurried = bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG), + isFlip = bitmask & WRAP_FLIP_FLAG, + Ctor = isBindKey ? undefined : createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length; + + while (index--) { + args[index] = arguments[index]; + } + if (isCurried) { + var placeholder = getHolder(wrapper), + holdersCount = countHolders(args, placeholder); + } + if (partials) { + args = composeArgs(args, partials, holders, isCurried); + } + if (partialsRight) { + args = composeArgsRight(args, partialsRight, holdersRight, isCurried); + } + length -= holdersCount; + if (isCurried && length < arity) { + var newHolders = replaceHolders(args, placeholder); + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, thisArg, + args, newHolders, argPos, ary, arity - length + ); + } + var thisBinding = isBind ? thisArg : this, + fn = isBindKey ? thisBinding[func] : func; + + length = args.length; + if (argPos) { + args = reorder(args, argPos); + } else if (isFlip && length > 1) { + args.reverse(); + } + if (isAry && ary < length) { + args.length = ary; + } + if (this && this !== root && this instanceof wrapper) { + fn = Ctor || createCtor(fn); + } + return fn.apply(thisBinding, args); + } + return wrapper; + } + + /** + * Creates a function like `_.invertBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} toIteratee The function to resolve iteratees. + * @returns {Function} Returns the new inverter function. + */ + function createInverter(setter, toIteratee) { + return function(object, iteratee) { + return baseInverter(object, setter, toIteratee(iteratee), {}); + }; + } + + /** + * Creates a function that performs a mathematical operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @param {number} [defaultValue] The value used for `undefined` arguments. + * @returns {Function} Returns the new mathematical operation function. + */ + function createMathOperation(operator, defaultValue) { + return function(value, other) { + var result; + if (value === undefined && other === undefined) { + return defaultValue; + } + if (value !== undefined) { + result = value; + } + if (other !== undefined) { + if (result === undefined) { + return other; + } + if (typeof value == 'string' || typeof other == 'string') { + value = baseToString(value); + other = baseToString(other); + } else { + value = baseToNumber(value); + other = baseToNumber(other); + } + result = operator(value, other); + } + return result; + }; + } + + /** + * Creates a function like `_.over`. + * + * @private + * @param {Function} arrayFunc The function to iterate over iteratees. + * @returns {Function} Returns the new over function. + */ + function createOver(arrayFunc) { + return flatRest(function(iteratees) { + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + return baseRest(function(args) { + var thisArg = this; + return arrayFunc(iteratees, function(iteratee) { + return apply(iteratee, thisArg, args); + }); + }); + }); + } + + /** + * Creates the padding for `string` based on `length`. The `chars` string + * is truncated if the number of characters exceeds `length`. 
+ * + * @private + * @param {number} length The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padding for `string`. + */ + function createPadding(length, chars) { + chars = chars === undefined ? ' ' : baseToString(chars); + + var charsLength = chars.length; + if (charsLength < 2) { + return charsLength ? baseRepeat(chars, length) : chars; + } + var result = baseRepeat(chars, nativeCeil(length / stringSize(chars))); + return hasUnicode(chars) + ? castSlice(stringToArray(result), 0, length).join('') + : result.slice(0, length); + } + + /** + * Creates a function that wraps `func` to invoke it with the `this` binding + * of `thisArg` and `partials` prepended to the arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} partials The arguments to prepend to those provided to + * the new function. + * @returns {Function} Returns the new wrapped function. + */ + function createPartial(func, bitmask, thisArg, partials) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var argsIndex = -1, + argsLength = arguments.length, + leftIndex = -1, + leftLength = partials.length, + args = Array(leftLength + argsLength), + fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + + while (++leftIndex < leftLength) { + args[leftIndex] = partials[leftIndex]; + } + while (argsLength--) { + args[leftIndex++] = arguments[++argsIndex]; + } + return apply(fn, isBind ? thisArg : this, args); + } + return wrapper; + } + + /** + * Creates a `_.range` or `_.rangeRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new range function. + */ + function createRange(fromRight) { + return function(start, end, step) { + if (step && typeof step != 'number' && isIterateeCall(start, end, step)) { + end = step = undefined; + } + // Ensure the sign of `-0` is preserved. + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + step = step === undefined ? (start < end ? 1 : -1) : toFinite(step); + return baseRange(start, end, step, fromRight); + }; + } + + /** + * Creates a function that performs a relational operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @returns {Function} Returns the new relational operation function. + */ + function createRelationalOperation(operator) { + return function(value, other) { + if (!(typeof value == 'string' && typeof other == 'string')) { + value = toNumber(value); + other = toNumber(other); + } + return operator(value, other); + }; + } + + /** + * Creates a function that wraps `func` to continue currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {Function} wrapFunc The function to create the `func` wrapper. + * @param {*} placeholder The placeholder value. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. 
+ * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createRecurry(func, bitmask, wrapFunc, placeholder, thisArg, partials, holders, argPos, ary, arity) { + var isCurry = bitmask & WRAP_CURRY_FLAG, + newHolders = isCurry ? holders : undefined, + newHoldersRight = isCurry ? undefined : holders, + newPartials = isCurry ? partials : undefined, + newPartialsRight = isCurry ? undefined : partials; + + bitmask |= (isCurry ? WRAP_PARTIAL_FLAG : WRAP_PARTIAL_RIGHT_FLAG); + bitmask &= ~(isCurry ? WRAP_PARTIAL_RIGHT_FLAG : WRAP_PARTIAL_FLAG); + + if (!(bitmask & WRAP_CURRY_BOUND_FLAG)) { + bitmask &= ~(WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG); + } + var newData = [ + func, bitmask, thisArg, newPartials, newHolders, newPartialsRight, + newHoldersRight, argPos, ary, arity + ]; + + var result = wrapFunc.apply(undefined, newData); + if (isLaziable(func)) { + setData(result, newData); + } + result.placeholder = placeholder; + return setWrapToString(result, func, bitmask); + } + + /** + * Creates a function like `_.round`. + * + * @private + * @param {string} methodName The name of the `Math` method to use when rounding. + * @returns {Function} Returns the new round function. + */ + function createRound(methodName) { + var func = Math[methodName]; + return function(number, precision) { + number = toNumber(number); + precision = precision == null ? 0 : nativeMin(toInteger(precision), 292); + if (precision) { + // Shift with exponential notation to avoid floating-point issues. + // See [MDN](https://mdn.io/round#Examples) for more details. + var pair = (toString(number) + 'e').split('e'), + value = func(pair[0] + 'e' + (+pair[1] + precision)); + + pair = (toString(value) + 'e').split('e'); + return +(pair[0] + 'e' + (+pair[1] - precision)); + } + return func(number); + }; + } + + /** + * Creates a set object of `values`. + * + * @private + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. + */ + var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); + }; + + /** + * Creates a `_.toPairs` or `_.toPairsIn` function. + * + * @private + * @param {Function} keysFunc The function to get the keys of a given object. + * @returns {Function} Returns the new pairs function. + */ + function createToPairs(keysFunc) { + return function(object) { + var tag = getTag(object); + if (tag == mapTag) { + return mapToArray(object); + } + if (tag == setTag) { + return setToPairs(object); + } + return baseToPairs(object, keysFunc(object)); + }; + } + + /** + * Creates a function that either curries or invokes `func` with optional + * `this` binding and partially applied arguments. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. + * 1 - `_.bind` + * 2 - `_.bindKey` + * 4 - `_.curry` or `_.curryRight` of a bound function + * 8 - `_.curry` + * 16 - `_.curryRight` + * 32 - `_.partial` + * 64 - `_.partialRight` + * 128 - `_.rearg` + * 256 - `_.ary` + * 512 - `_.flip` + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to be partially applied. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. 
+ * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createWrap(func, bitmask, thisArg, partials, holders, argPos, ary, arity) { + var isBindKey = bitmask & WRAP_BIND_KEY_FLAG; + if (!isBindKey && typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + var length = partials ? partials.length : 0; + if (!length) { + bitmask &= ~(WRAP_PARTIAL_FLAG | WRAP_PARTIAL_RIGHT_FLAG); + partials = holders = undefined; + } + ary = ary === undefined ? ary : nativeMax(toInteger(ary), 0); + arity = arity === undefined ? arity : toInteger(arity); + length -= holders ? holders.length : 0; + + if (bitmask & WRAP_PARTIAL_RIGHT_FLAG) { + var partialsRight = partials, + holdersRight = holders; + + partials = holders = undefined; + } + var data = isBindKey ? undefined : getData(func); + + var newData = [ + func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, + argPos, ary, arity + ]; + + if (data) { + mergeData(newData, data); + } + func = newData[0]; + bitmask = newData[1]; + thisArg = newData[2]; + partials = newData[3]; + holders = newData[4]; + arity = newData[9] = newData[9] === undefined + ? (isBindKey ? 0 : func.length) + : nativeMax(newData[9] - length, 0); + + if (!arity && bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG)) { + bitmask &= ~(WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG); + } + if (!bitmask || bitmask == WRAP_BIND_FLAG) { + var result = createBind(func, bitmask, thisArg); + } else if (bitmask == WRAP_CURRY_FLAG || bitmask == WRAP_CURRY_RIGHT_FLAG) { + result = createCurry(func, bitmask, arity); + } else if ((bitmask == WRAP_PARTIAL_FLAG || bitmask == (WRAP_BIND_FLAG | WRAP_PARTIAL_FLAG)) && !holders.length) { + result = createPartial(func, bitmask, thisArg, partials); + } else { + result = createHybrid.apply(undefined, newData); + } + var setter = data ? baseSetData : setData; + return setWrapToString(setter(result, newData), func, bitmask); + } + + /** + * Used by `_.defaults` to customize its `_.assignIn` use to assign properties + * of source objects to the destination object for all destination properties + * that resolve to `undefined`. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to assign. + * @param {Object} object The parent object of `objValue`. + * @returns {*} Returns the value to assign. + */ + function customDefaultsAssignIn(objValue, srcValue, key, object) { + if (objValue === undefined || + (eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) { + return srcValue; + } + return objValue; + } + + /** + * Used by `_.defaultsDeep` to customize its `_.merge` use to merge source + * objects into destination objects that are passed thru. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to merge. + * @param {Object} object The parent object of `objValue`. + * @param {Object} source The parent object of `srcValue`. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + * @returns {*} Returns the value to assign. + */ + function customDefaultsMerge(objValue, srcValue, key, object, source, stack) { + if (isObject(objValue) && isObject(srcValue)) { + // Recursively merge objects and arrays (susceptible to call stack limits). 
+ stack.set(srcValue, objValue); + baseMerge(objValue, srcValue, undefined, customDefaultsMerge, stack); + stack['delete'](srcValue); + } + return objValue; + } + + /** + * Used by `_.omit` to customize its `_.cloneDeep` use to only clone plain + * objects. + * + * @private + * @param {*} value The value to inspect. + * @param {string} key The key of the property to inspect. + * @returns {*} Returns the uncloned value or `undefined` to defer cloning to `_.cloneDeep`. + */ + function customOmitClone(value) { + return isPlainObject(value) ? undefined : value; + } + + /** + * A specialized version of `baseIsEqualDeep` for arrays with support for + * partial deep comparisons. + * + * @private + * @param {Array} array The array to compare. + * @param {Array} other The other array to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `array` and `other` objects. + * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. + */ + function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + arrLength = array.length, + othLength = other.length; + + if (arrLength != othLength && !(isPartial && othLength > arrLength)) { + return false; + } + // Assume cyclic values are equal. + var stacked = stack.get(array); + if (stacked && stack.get(other)) { + return stacked == other; + } + var index = -1, + result = true, + seen = (bitmask & COMPARE_UNORDERED_FLAG) ? new SetCache : undefined; + + stack.set(array, other); + stack.set(other, array); + + // Ignore non-index properties. + while (++index < arrLength) { + var arrValue = array[index], + othValue = other[index]; + + if (customizer) { + var compared = isPartial + ? customizer(othValue, arrValue, index, other, array, stack) + : customizer(arrValue, othValue, index, array, other, stack); + } + if (compared !== undefined) { + if (compared) { + continue; + } + result = false; + break; + } + // Recursively compare arrays (susceptible to call stack limits). + if (seen) { + if (!arraySome(other, function(othValue, othIndex) { + if (!cacheHas(seen, othIndex) && + (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { + return seen.push(othIndex); + } + })) { + result = false; + break; + } + } else if (!( + arrValue === othValue || + equalFunc(arrValue, othValue, bitmask, customizer, stack) + )) { + result = false; + break; + } + } + stack['delete'](array); + stack['delete'](other); + return result; + } + + /** + * A specialized version of `baseIsEqualDeep` for comparing objects of + * the same `toStringTag`. + * + * **Note:** This function only supports comparing values with tags of + * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {string} tag The `toStringTag` of the objects to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. 
+ * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { + switch (tag) { + case dataViewTag: + if ((object.byteLength != other.byteLength) || + (object.byteOffset != other.byteOffset)) { + return false; + } + object = object.buffer; + other = other.buffer; + + case arrayBufferTag: + if ((object.byteLength != other.byteLength) || + !equalFunc(new Uint8Array(object), new Uint8Array(other))) { + return false; + } + return true; + + case boolTag: + case dateTag: + case numberTag: + // Coerce booleans to `1` or `0` and dates to milliseconds. + // Invalid dates are coerced to `NaN`. + return eq(+object, +other); + + case errorTag: + return object.name == other.name && object.message == other.message; + + case regexpTag: + case stringTag: + // Coerce regexes to strings and treat strings, primitives and objects, + // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring + // for more details. + return object == (other + ''); + + case mapTag: + var convert = mapToArray; + + case setTag: + var isPartial = bitmask & COMPARE_PARTIAL_FLAG; + convert || (convert = setToArray); + + if (object.size != other.size && !isPartial) { + return false; + } + // Assume cyclic values are equal. + var stacked = stack.get(object); + if (stacked) { + return stacked == other; + } + bitmask |= COMPARE_UNORDERED_FLAG; + + // Recursively compare objects (susceptible to call stack limits). + stack.set(object, other); + var result = equalArrays(convert(object), convert(other), bitmask, customizer, equalFunc, stack); + stack['delete'](object); + return result; + + case symbolTag: + if (symbolValueOf) { + return symbolValueOf.call(object) == symbolValueOf.call(other); + } + } + return false; + } + + /** + * A specialized version of `baseIsEqualDeep` for objects with support for + * partial deep comparisons. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + objProps = getAllKeys(object), + objLength = objProps.length, + othProps = getAllKeys(other), + othLength = othProps.length; + + if (objLength != othLength && !isPartial) { + return false; + } + var index = objLength; + while (index--) { + var key = objProps[index]; + if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { + return false; + } + } + // Assume cyclic values are equal. + var stacked = stack.get(object); + if (stacked && stack.get(other)) { + return stacked == other; + } + var result = true; + stack.set(object, other); + stack.set(other, object); + + var skipCtor = isPartial; + while (++index < objLength) { + key = objProps[index]; + var objValue = object[key], + othValue = other[key]; + + if (customizer) { + var compared = isPartial + ? 
customizer(othValue, objValue, key, other, object, stack) + : customizer(objValue, othValue, key, object, other, stack); + } + // Recursively compare objects (susceptible to call stack limits). + if (!(compared === undefined + ? (objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack)) + : compared + )) { + result = false; + break; + } + skipCtor || (skipCtor = key == 'constructor'); + } + if (result && !skipCtor) { + var objCtor = object.constructor, + othCtor = other.constructor; + + // Non `Object` object instances with different constructors are not equal. + if (objCtor != othCtor && + ('constructor' in object && 'constructor' in other) && + !(typeof objCtor == 'function' && objCtor instanceof objCtor && + typeof othCtor == 'function' && othCtor instanceof othCtor)) { + result = false; + } + } + stack['delete'](object); + stack['delete'](other); + return result; + } + + /** + * A specialized version of `baseRest` which flattens the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + function flatRest(func) { + return setToString(overRest(func, undefined, flatten), func + ''); + } + + /** + * Creates an array of own enumerable property names and symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeys(object) { + return baseGetAllKeys(object, keys, getSymbols); + } + + /** + * Creates an array of own and inherited enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeysIn(object) { + return baseGetAllKeys(object, keysIn, getSymbolsIn); + } + + /** + * Gets metadata for `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {*} Returns the metadata for `func`. + */ + var getData = !metaMap ? noop : function(func) { + return metaMap.get(func); + }; + + /** + * Gets the name of `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {string} Returns the function name. + */ + function getFuncName(func) { + var result = (func.name + ''), + array = realNames[result], + length = hasOwnProperty.call(realNames, result) ? array.length : 0; + + while (length--) { + var data = array[length], + otherFunc = data.func; + if (otherFunc == null || otherFunc == func) { + return data.name; + } + } + return result; + } + + /** + * Gets the argument placeholder value for `func`. + * + * @private + * @param {Function} func The function to inspect. + * @returns {*} Returns the placeholder value. + */ + function getHolder(func) { + var object = hasOwnProperty.call(lodash, 'placeholder') ? lodash : func; + return object.placeholder; + } + + /** + * Gets the appropriate "iteratee" function. If `_.iteratee` is customized, + * this function returns the custom method, otherwise it returns `baseIteratee`. + * If arguments are provided, the chosen function is invoked with them and + * its result is returned. + * + * @private + * @param {*} [value] The value to convert to an iteratee. + * @param {number} [arity] The arity of the created iteratee. + * @returns {Function} Returns the chosen function or its result. + */ + function getIteratee() { + var result = lodash.iteratee || iteratee; + result = result === iteratee ? 
baseIteratee : result; + return arguments.length ? result(arguments[0], arguments[1]) : result; + } + + /** + * Gets the data for `map`. + * + * @private + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. + */ + function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; + } + + /** + * Gets the property names, values, and compare flags of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the match data of `object`. + */ + function getMatchData(object) { + var result = keys(object), + length = result.length; + + while (length--) { + var key = result[length], + value = object[key]; + + result[length] = [key, value, isStrictComparable(value)]; + } + return result; + } + + /** + * Gets the native function at `key` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. + */ + function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; + } + + /** + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. + */ + function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; + + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} + + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } + } + return result; + } + + /** + * Creates an array of the own enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbols = !nativeGetSymbols ? stubArray : function(object) { + if (object == null) { + return []; + } + object = Object(object); + return arrayFilter(nativeGetSymbols(object), function(symbol) { + return propertyIsEnumerable.call(object, symbol); + }); + }; + + /** + * Creates an array of the own and inherited enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbolsIn = !nativeGetSymbols ? stubArray : function(object) { + var result = []; + while (object) { + arrayPush(result, getSymbols(object)); + object = getPrototype(object); + } + return result; + }; + + /** + * Gets the `toStringTag` of `value`. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + var getTag = baseGetTag; + + // Fallback for data views, maps, sets, and weak maps in IE 11 and promises in Node.js < 6. + if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) || + (Map && getTag(new Map) != mapTag) || + (Promise && getTag(Promise.resolve()) != promiseTag) || + (Set && getTag(new Set) != setTag) || + (WeakMap && getTag(new WeakMap) != weakMapTag)) { + getTag = function(value) { + var result = baseGetTag(value), + Ctor = result == objectTag ? value.constructor : undefined, + ctorString = Ctor ? 
toSource(Ctor) : ''; + + if (ctorString) { + switch (ctorString) { + case dataViewCtorString: return dataViewTag; + case mapCtorString: return mapTag; + case promiseCtorString: return promiseTag; + case setCtorString: return setTag; + case weakMapCtorString: return weakMapTag; + } + } + return result; + }; + } + + /** + * Gets the view, applying any `transforms` to the `start` and `end` positions. + * + * @private + * @param {number} start The start of the view. + * @param {number} end The end of the view. + * @param {Array} transforms The transformations to apply to the view. + * @returns {Object} Returns an object containing the `start` and `end` + * positions of the view. + */ + function getView(start, end, transforms) { + var index = -1, + length = transforms.length; + + while (++index < length) { + var data = transforms[index], + size = data.size; + + switch (data.type) { + case 'drop': start += size; break; + case 'dropRight': end -= size; break; + case 'take': end = nativeMin(end, start + size); break; + case 'takeRight': start = nativeMax(start, end - size); break; + } + } + return { 'start': start, 'end': end }; + } + + /** + * Extracts wrapper details from the `source` body comment. + * + * @private + * @param {string} source The source to inspect. + * @returns {Array} Returns the wrapper details. + */ + function getWrapDetails(source) { + var match = source.match(reWrapDetails); + return match ? match[1].split(reSplitDetails) : []; + } + + /** + * Checks if `path` exists on `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @param {Function} hasFunc The function to check properties. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + */ + function hasPath(object, path, hasFunc) { + path = castPath(path, object); + + var index = -1, + length = path.length, + result = false; + + while (++index < length) { + var key = toKey(path[index]); + if (!(result = object != null && hasFunc(object, key))) { + break; + } + object = object[key]; + } + if (result || ++index != length) { + return result; + } + length = object == null ? 0 : object.length; + return !!length && isLength(length) && isIndex(key, length) && + (isArray(object) || isArguments(object)); + } + + /** + * Initializes an array clone. + * + * @private + * @param {Array} array The array to clone. + * @returns {Array} Returns the initialized clone. + */ + function initCloneArray(array) { + var length = array.length, + result = new array.constructor(length); + + // Add properties assigned by `RegExp#exec`. + if (length && typeof array[0] == 'string' && hasOwnProperty.call(array, 'index')) { + result.index = array.index; + result.input = array.input; + } + return result; + } + + /** + * Initializes an object clone. + * + * @private + * @param {Object} object The object to clone. + * @returns {Object} Returns the initialized clone. + */ + function initCloneObject(object) { + return (typeof object.constructor == 'function' && !isPrototype(object)) + ? baseCreate(getPrototype(object)) + : {}; + } + + /** + * Initializes an object clone based on its `toStringTag`. + * + * **Note:** This function only supports cloning values with tags of + * `Boolean`, `Date`, `Error`, `Map`, `Number`, `RegExp`, `Set`, or `String`. + * + * @private + * @param {Object} object The object to clone. + * @param {string} tag The `toStringTag` of the object to clone. + * @param {boolean} [isDeep] Specify a deep clone. 
+ * @returns {Object} Returns the initialized clone. + */ + function initCloneByTag(object, tag, isDeep) { + var Ctor = object.constructor; + switch (tag) { + case arrayBufferTag: + return cloneArrayBuffer(object); + + case boolTag: + case dateTag: + return new Ctor(+object); + + case dataViewTag: + return cloneDataView(object, isDeep); + + case float32Tag: case float64Tag: + case int8Tag: case int16Tag: case int32Tag: + case uint8Tag: case uint8ClampedTag: case uint16Tag: case uint32Tag: + return cloneTypedArray(object, isDeep); + + case mapTag: + return new Ctor; + + case numberTag: + case stringTag: + return new Ctor(object); + + case regexpTag: + return cloneRegExp(object); + + case setTag: + return new Ctor; + + case symbolTag: + return cloneSymbol(object); + } + } + + /** + * Inserts wrapper `details` in a comment at the top of the `source` body. + * + * @private + * @param {string} source The source to modify. + * @returns {Array} details The details to insert. + * @returns {string} Returns the modified source. + */ + function insertWrapDetails(source, details) { + var length = details.length; + if (!length) { + return source; + } + var lastIndex = length - 1; + details[lastIndex] = (length > 1 ? '& ' : '') + details[lastIndex]; + details = details.join(length > 2 ? ', ' : ' '); + return source.replace(reWrapComment, '{\n/* [wrapped with ' + details + '] */\n'); + } + + /** + * Checks if `value` is a flattenable `arguments` object or array. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. + */ + function isFlattenable(value) { + return isArray(value) || isArguments(value) || + !!(spreadableSymbol && value && value[spreadableSymbol]); + } + + /** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ + function isIndex(value, length) { + var type = typeof value; + length = length == null ? MAX_SAFE_INTEGER : length; + + return !!length && + (type == 'number' || + (type != 'symbol' && reIsUint.test(value))) && + (value > -1 && value % 1 == 0 && value < length); + } + + /** + * Checks if the given arguments are from an iteratee call. + * + * @private + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. + * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. + */ + function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; + } + var type = typeof index; + if (type == 'number' + ? (isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; + } + + /** + * Checks if `value` is a property name and not a property path. + * + * @private + * @param {*} value The value to check. + * @param {Object} [object] The object to query keys on. + * @returns {boolean} Returns `true` if `value` is a property name, else `false`. 
+ */ + function isKey(value, object) { + if (isArray(value)) { + return false; + } + var type = typeof value; + if (type == 'number' || type == 'symbol' || type == 'boolean' || + value == null || isSymbol(value)) { + return true; + } + return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || + (object != null && value in Object(object)); + } + + /** + * Checks if `value` is suitable for use as unique object key. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is suitable, else `false`. + */ + function isKeyable(value) { + var type = typeof value; + return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') + ? (value !== '__proto__') + : (value === null); + } + + /** + * Checks if `func` has a lazy counterpart. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` has a lazy counterpart, + * else `false`. + */ + function isLaziable(func) { + var funcName = getFuncName(func), + other = lodash[funcName]; + + if (typeof other != 'function' || !(funcName in LazyWrapper.prototype)) { + return false; + } + if (func === other) { + return true; + } + var data = getData(other); + return !!data && func === data[0]; + } + + /** + * Checks if `func` has its source masked. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` is masked, else `false`. + */ + function isMasked(func) { + return !!maskSrcKey && (maskSrcKey in func); + } + + /** + * Checks if `func` is capable of being masked. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `func` is maskable, else `false`. + */ + var isMaskable = coreJsData ? isFunction : stubFalse; + + /** + * Checks if `value` is likely a prototype object. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. + */ + function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; + + return value === proto; + } + + /** + * Checks if `value` is suitable for strict equality comparisons, i.e. `===`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` if suitable for strict + * equality comparisons, else `false`. + */ + function isStrictComparable(value) { + return value === value && !isObject(value); + } + + /** + * A specialized version of `matchesProperty` for source values suitable + * for strict equality comparisons, i.e. `===`. + * + * @private + * @param {string} key The key of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function matchesStrictComparable(key, srcValue) { + return function(object) { + if (object == null) { + return false; + } + return object[key] === srcValue && + (srcValue !== undefined || (key in Object(object))); + }; + } + + /** + * A specialized version of `_.memoize` which clears the memoized function's + * cache when it exceeds `MAX_MEMOIZE_SIZE`. + * + * @private + * @param {Function} func The function to have its output memoized. + * @returns {Function} Returns the new memoized function. 
+ */ + function memoizeCapped(func) { + var result = memoize(func, function(key) { + if (cache.size === MAX_MEMOIZE_SIZE) { + cache.clear(); + } + return key; + }); + + var cache = result.cache; + return result; + } + + /** + * Merges the function metadata of `source` into `data`. + * + * Merging metadata reduces the number of wrappers used to invoke a function. + * This is possible because methods like `_.bind`, `_.curry`, and `_.partial` + * may be applied regardless of execution order. Methods like `_.ary` and + * `_.rearg` modify function arguments, making the order in which they are + * executed important, preventing the merging of metadata. However, we make + * an exception for a safe combined case where curried functions have `_.ary` + * and or `_.rearg` applied. + * + * @private + * @param {Array} data The destination metadata. + * @param {Array} source The source metadata. + * @returns {Array} Returns `data`. + */ + function mergeData(data, source) { + var bitmask = data[1], + srcBitmask = source[1], + newBitmask = bitmask | srcBitmask, + isCommon = newBitmask < (WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG | WRAP_ARY_FLAG); + + var isCombo = + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_CURRY_FLAG)) || + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_REARG_FLAG) && (data[7].length <= source[8])) || + ((srcBitmask == (WRAP_ARY_FLAG | WRAP_REARG_FLAG)) && (source[7].length <= source[8]) && (bitmask == WRAP_CURRY_FLAG)); + + // Exit early if metadata can't be merged. + if (!(isCommon || isCombo)) { + return data; + } + // Use source `thisArg` if available. + if (srcBitmask & WRAP_BIND_FLAG) { + data[2] = source[2]; + // Set when currying a bound function. + newBitmask |= bitmask & WRAP_BIND_FLAG ? 0 : WRAP_CURRY_BOUND_FLAG; + } + // Compose partial arguments. + var value = source[3]; + if (value) { + var partials = data[3]; + data[3] = partials ? composeArgs(partials, value, source[4]) : value; + data[4] = partials ? replaceHolders(data[3], PLACEHOLDER) : source[4]; + } + // Compose partial right arguments. + value = source[5]; + if (value) { + partials = data[5]; + data[5] = partials ? composeArgsRight(partials, value, source[6]) : value; + data[6] = partials ? replaceHolders(data[5], PLACEHOLDER) : source[6]; + } + // Use source `argPos` if available. + value = source[7]; + if (value) { + data[7] = value; + } + // Use source `ary` if it's smaller. + if (srcBitmask & WRAP_ARY_FLAG) { + data[8] = data[8] == null ? source[8] : nativeMin(data[8], source[8]); + } + // Use source `arity` if one is not provided. + if (data[9] == null) { + data[9] = source[9]; + } + // Use source `func` and merge bitmasks. + data[0] = source[0]; + data[1] = newBitmask; + + return data; + } + + /** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; + } + + /** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ + function objectToString(value) { + return nativeObjectToString.call(value); + } + + /** + * A specialized version of `baseRest` which transforms the rest array. 
+ * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. + */ + function overRest(func, start, transform) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return apply(func, this, otherArgs); + }; + } + + /** + * Gets the parent value at `path` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} path The path to get the parent value of. + * @returns {*} Returns the parent value. + */ + function parent(object, path) { + return path.length < 2 ? object : baseGet(object, baseSlice(path, 0, -1)); + } + + /** + * Reorder `array` according to the specified indexes where the element at + * the first index is assigned as the first element, the element at + * the second index is assigned as the second element, and so on. + * + * @private + * @param {Array} array The array to reorder. + * @param {Array} indexes The arranged array indexes. + * @returns {Array} Returns `array`. + */ + function reorder(array, indexes) { + var arrLength = array.length, + length = nativeMin(indexes.length, arrLength), + oldArray = copyArray(array); + + while (length--) { + var index = indexes[length]; + array[length] = isIndex(index, arrLength) ? oldArray[index] : undefined; + } + return array; + } + + /** + * Sets metadata for `func`. + * + * **Note:** If this function becomes hot, i.e. is invoked a lot in a short + * period of time, it will trip its breaker and transition to an identity + * function to avoid garbage collection pauses in V8. See + * [V8 issue 2070](https://bugs.chromium.org/p/v8/issues/detail?id=2070) + * for more details. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var setData = shortOut(baseSetData); + + /** + * A simple wrapper around the global [`setTimeout`](https://mdn.io/setTimeout). + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @returns {number|Object} Returns the timer id or timeout object. + */ + var setTimeout = ctxSetTimeout || function(func, wait) { + return root.setTimeout(func, wait); + }; + + /** + * Sets the `toString` method of `func` to return `string`. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var setToString = shortOut(baseSetToString); + + /** + * Sets the `toString` method of `wrapper` to mimic the source of `reference` + * with wrapper details in a comment at the top of the source body. + * + * @private + * @param {Function} wrapper The function to modify. + * @param {Function} reference The reference function. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Function} Returns `wrapper`. 
+ */ + function setWrapToString(wrapper, reference, bitmask) { + var source = (reference + ''); + return setToString(wrapper, insertWrapDetails(source, updateWrapDetails(getWrapDetails(source), bitmask))); + } + + /** + * Creates a function that'll short out and invoke `identity` instead + * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` + * milliseconds. + * + * @private + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new shortable function. + */ + function shortOut(func) { + var count = 0, + lastCalled = 0; + + return function() { + var stamp = nativeNow(), + remaining = HOT_SPAN - (stamp - lastCalled); + + lastCalled = stamp; + if (remaining > 0) { + if (++count >= HOT_COUNT) { + return arguments[0]; + } + } else { + count = 0; + } + return func.apply(undefined, arguments); + }; + } + + /** + * A specialized version of `_.shuffle` which mutates and sets the size of `array`. + * + * @private + * @param {Array} array The array to shuffle. + * @param {number} [size=array.length] The size of `array`. + * @returns {Array} Returns `array`. + */ + function shuffleSelf(array, size) { + var index = -1, + length = array.length, + lastIndex = length - 1; + + size = size === undefined ? length : size; + while (++index < size) { + var rand = baseRandom(index, lastIndex), + value = array[rand]; + + array[rand] = array[index]; + array[index] = value; + } + array.length = size; + return array; + } + + /** + * Converts `string` to a property path array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the property path array. + */ + var stringToPath = memoizeCapped(function(string) { + var result = []; + if (string.charCodeAt(0) === 46 /* . */) { + result.push(''); + } + string.replace(rePropName, function(match, number, quote, subString) { + result.push(quote ? subString.replace(reEscapeChar, '$1') : (number || match)); + }); + return result; + }); + + /** + * Converts `value` to a string key if it's not a string or symbol. + * + * @private + * @param {*} value The value to inspect. + * @returns {string|symbol} Returns the key. + */ + function toKey(value) { + if (typeof value == 'string' || isSymbol(value)) { + return value; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + } + + /** + * Converts `func` to its source code. + * + * @private + * @param {Function} func The function to convert. + * @returns {string} Returns the source code. + */ + function toSource(func) { + if (func != null) { + try { + return funcToString.call(func); + } catch (e) {} + try { + return (func + ''); + } catch (e) {} + } + return ''; + } + + /** + * Updates wrapper `details` based on `bitmask` flags. + * + * @private + * @returns {Array} details The details to modify. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Array} Returns `details`. + */ + function updateWrapDetails(details, bitmask) { + arrayEach(wrapFlags, function(pair) { + var value = '_.' + pair[0]; + if ((bitmask & pair[1]) && !arrayIncludes(details, value)) { + details.push(value); + } + }); + return details.sort(); + } + + /** + * Creates a clone of `wrapper`. + * + * @private + * @param {Object} wrapper The wrapper to clone. + * @returns {Object} Returns the cloned wrapper. 
+ */ + function wrapperClone(wrapper) { + if (wrapper instanceof LazyWrapper) { + return wrapper.clone(); + } + var result = new LodashWrapper(wrapper.__wrapped__, wrapper.__chain__); + result.__actions__ = copyArray(wrapper.__actions__); + result.__index__ = wrapper.__index__; + result.__values__ = wrapper.__values__; + return result; + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of elements split into groups the length of `size`. + * If `array` can't be split evenly, the final chunk will be the remaining + * elements. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to process. + * @param {number} [size=1] The length of each chunk + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the new array of chunks. + * @example + * + * _.chunk(['a', 'b', 'c', 'd'], 2); + * // => [['a', 'b'], ['c', 'd']] + * + * _.chunk(['a', 'b', 'c', 'd'], 3); + * // => [['a', 'b', 'c'], ['d']] + */ + function chunk(array, size, guard) { + if ((guard ? isIterateeCall(array, size, guard) : size === undefined)) { + size = 1; + } else { + size = nativeMax(toInteger(size), 0); + } + var length = array == null ? 0 : array.length; + if (!length || size < 1) { + return []; + } + var index = 0, + resIndex = 0, + result = Array(nativeCeil(length / size)); + + while (index < length) { + result[resIndex++] = baseSlice(array, index, (index += size)); + } + return result; + } + + /** + * Creates an array with all falsey values removed. The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are falsey. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to compact. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ + function compact(array) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value) { + result[resIndex++] = value; + } + } + return result; + } + + /** + * Creates a new array concatenating `array` with any additional arrays + * and/or values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to concatenate. + * @param {...*} [values] The values to concatenate. + * @returns {Array} Returns the new concatenated array. + * @example + * + * var array = [1]; + * var other = _.concat(array, 2, [3], [[4]]); + * + * console.log(other); + * // => [1, 2, 3, [4]] + * + * console.log(array); + * // => [1] + */ + function concat() { + var length = arguments.length; + if (!length) { + return []; + } + var args = Array(length - 1), + array = arguments[0], + index = length; + + while (index--) { + args[index - 1] = arguments[index]; + } + return arrayPush(isArray(array) ? copyArray(array) : [array], baseFlatten(args, 1)); + } + + /** + * Creates an array of `array` values not included in the other given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * **Note:** Unlike `_.pullAll`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. 
+ * @param {...Array} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.without, _.xor + * @example + * + * _.difference([2, 1], [2, 3]); + * // => [1] + */ + var difference = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) + : []; + }); + + /** + * This method is like `_.difference` except that it accepts `iteratee` which + * is invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * **Note:** Unlike `_.pullAllBy`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.differenceBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2] + * + * // The `_.property` iteratee shorthand. + * _.differenceBy([{ 'x': 2 }, { 'x': 1 }], [{ 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var differenceBy = baseRest(function(array, values) { + var iteratee = last(values); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)) + : []; + }); + + /** + * This method is like `_.difference` except that it accepts `comparator` + * which is invoked to compare elements of `array` to `values`. The order and + * references of result values are determined by the first array. The comparator + * is invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.pullAllWith`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * + * _.differenceWith(objects, [{ 'x': 1, 'y': 2 }], _.isEqual); + * // => [{ 'x': 2, 'y': 1 }] + */ + var differenceWith = baseRest(function(array, values) { + var comparator = last(values); + if (isArrayLikeObject(comparator)) { + comparator = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), undefined, comparator) + : []; + }); + + /** + * Creates a slice of `array` with `n` elements dropped from the beginning. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.drop([1, 2, 3]); + * // => [2, 3] + * + * _.drop([1, 2, 3], 2); + * // => [3] + * + * _.drop([1, 2, 3], 5); + * // => [] + * + * _.drop([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function drop(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 
1 : toInteger(n); + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with `n` elements dropped from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.dropRight([1, 2, 3]); + * // => [1, 2] + * + * _.dropRight([1, 2, 3], 2); + * // => [1] + * + * _.dropRight([1, 2, 3], 5); + * // => [] + * + * _.dropRight([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function dropRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, 0, n < 0 ? 0 : n); + } + + /** + * Creates a slice of `array` excluding elements dropped from the end. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.dropRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney'] + * + * // The `_.matches` iteratee shorthand. + * _.dropRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropRightWhile(users, ['active', false]); + * // => objects for ['barney'] + * + * // The `_.property` iteratee shorthand. + * _.dropRightWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true, true) + : []; + } + + /** + * Creates a slice of `array` excluding elements dropped from the beginning. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.dropWhile(users, function(o) { return !o.active; }); + * // => objects for ['pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.dropWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropWhile(users, ['active', false]); + * // => objects for ['pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.dropWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropWhile(array, predicate) { + return (array && array.length) + ? 
baseWhile(array, getIteratee(predicate, 3), true) + : []; + } + + /** + * Fills elements of `array` with `value` from `start` up to, but not + * including, `end`. + * + * **Note:** This method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Array + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.fill(array, 'a'); + * console.log(array); + * // => ['a', 'a', 'a'] + * + * _.fill(Array(3), 2); + * // => [2, 2, 2] + * + * _.fill([4, 6, 8, 10], '*', 1, 3); + * // => [4, '*', '*', 10] + */ + function fill(array, value, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (start && typeof start != 'number' && isIterateeCall(array, value, start)) { + start = 0; + end = length; + } + return baseFill(array, value, start, end); + } + + /** + * This method is like `_.find` except that it returns the index of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.findIndex(users, function(o) { return o.user == 'barney'; }); + * // => 0 + * + * // The `_.matches` iteratee shorthand. + * _.findIndex(users, { 'user': 'fred', 'active': false }); + * // => 1 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findIndex(users, ['active', false]); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.findIndex(users, 'active'); + * // => 2 + */ + function findIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseFindIndex(array, getIteratee(predicate, 3), index); + } + + /** + * This method is like `_.findIndex` except that it iterates over elements + * of `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.findLastIndex(users, function(o) { return o.user == 'pebbles'; }); + * // => 2 + * + * // The `_.matches` iteratee shorthand. + * _.findLastIndex(users, { 'user': 'barney', 'active': true }); + * // => 0 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastIndex(users, ['active', false]); + * // => 2 + * + * // The `_.property` iteratee shorthand. 
+ * _.findLastIndex(users, 'active'); + * // => 0 + */ + function findLastIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length - 1; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = fromIndex < 0 + ? nativeMax(length + index, 0) + : nativeMin(index, length - 1); + } + return baseFindIndex(array, getIteratee(predicate, 3), index, true); + } + + /** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ + function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; + } + + /** + * Recursively flattens `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flattenDeep([1, [2, [3, [4]], 5]]); + * // => [1, 2, 3, 4, 5] + */ + function flattenDeep(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, INFINITY) : []; + } + + /** + * Recursively flatten `array` up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Array + * @param {Array} array The array to flatten. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * var array = [1, [2, [3, [4]], 5]]; + * + * _.flattenDepth(array, 1); + * // => [1, 2, [3, [4]], 5] + * + * _.flattenDepth(array, 2); + * // => [1, 2, 3, [4], 5] + */ + function flattenDepth(array, depth) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(array, depth); + } + + /** + * The inverse of `_.toPairs`; this method returns an object composed + * from key-value `pairs`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} pairs The key-value pairs. + * @returns {Object} Returns the new object. + * @example + * + * _.fromPairs([['a', 1], ['b', 2]]); + * // => { 'a': 1, 'b': 2 } + */ + function fromPairs(pairs) { + var index = -1, + length = pairs == null ? 0 : pairs.length, + result = {}; + + while (++index < length) { + var pair = pairs[index]; + result[pair[0]] = pair[1]; + } + return result; + } + + /** + * Gets the first element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias first + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the first element of `array`. + * @example + * + * _.head([1, 2, 3]); + * // => 1 + * + * _.head([]); + * // => undefined + */ + function head(array) { + return (array && array.length) ? array[0] : undefined; + } + + /** + * Gets the index at which the first occurrence of `value` is found in `array` + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. If `fromIndex` is negative, it's used as the + * offset from the end of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. 
+ * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.indexOf([1, 2, 1, 2], 2); + * // => 1 + * + * // Search from the `fromIndex`. + * _.indexOf([1, 2, 1, 2], 2, 2); + * // => 3 + */ + function indexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseIndexOf(array, value, index); + } + + /** + * Gets all but the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.initial([1, 2, 3]); + * // => [1, 2] + */ + function initial(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 0, -1) : []; + } + + /** + * Creates an array of unique values that are included in all given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersection([2, 1], [2, 3]); + * // => [2] + */ + var intersection = baseRest(function(arrays) { + var mapped = arrayMap(arrays, castArrayLikeObject); + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped) + : []; + }); + + /** + * This method is like `_.intersection` except that it accepts `iteratee` + * which is invoked for each element of each `arrays` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersectionBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [2.1] + * + * // The `_.property` iteratee shorthand. + * _.intersectionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }] + */ + var intersectionBy = baseRest(function(arrays) { + var iteratee = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + if (iteratee === last(mapped)) { + iteratee = undefined; + } else { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, getIteratee(iteratee, 2)) + : []; + }); + + /** + * This method is like `_.intersection` except that it accepts `comparator` + * which is invoked to compare elements of `arrays`. The order and references + * of result values are determined by the first array. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of intersecting values. 
+ * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.intersectionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }] + */ + var intersectionWith = baseRest(function(arrays) { + var comparator = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + comparator = typeof comparator == 'function' ? comparator : undefined; + if (comparator) { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, undefined, comparator) + : []; + }); + + /** + * Converts all elements in `array` into a string separated by `separator`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to convert. + * @param {string} [separator=','] The element separator. + * @returns {string} Returns the joined string. + * @example + * + * _.join(['a', 'b', 'c'], '~'); + * // => 'a~b~c' + */ + function join(array, separator) { + return array == null ? '' : nativeJoin.call(array, separator); + } + + /** + * Gets the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the last element of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + */ + function last(array) { + var length = array == null ? 0 : array.length; + return length ? array[length - 1] : undefined; + } + + /** + * This method is like `_.indexOf` except that it iterates over elements of + * `array` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.lastIndexOf([1, 2, 1, 2], 2); + * // => 3 + * + * // Search from the `fromIndex`. + * _.lastIndexOf([1, 2, 1, 2], 2, 2); + * // => 1 + */ + function lastIndexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1); + } + return value === value + ? strictLastIndexOf(array, value, index) + : baseFindIndex(array, baseIsNaN, index, true); + } + + /** + * Gets the element at index `n` of `array`. If `n` is negative, the nth + * element from the end is returned. + * + * @static + * @memberOf _ + * @since 4.11.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=0] The index of the element to return. + * @returns {*} Returns the nth element of `array`. + * @example + * + * var array = ['a', 'b', 'c', 'd']; + * + * _.nth(array, 1); + * // => 'b' + * + * _.nth(array, -2); + * // => 'c'; + */ + function nth(array, n) { + return (array && array.length) ? baseNth(array, toInteger(n)) : undefined; + } + + /** + * Removes all given values from `array` using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.without`, this method mutates `array`. Use `_.remove` + * to remove elements from an array by predicate. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. 
+ * @param {...*} [values] The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pull(array, 'a', 'c'); + * console.log(array); + * // => ['b', 'b'] + */ + var pull = baseRest(pullAll); + + /** + * This method is like `_.pull` except that it accepts an array of values to remove. + * + * **Note:** Unlike `_.difference`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pullAll(array, ['a', 'c']); + * console.log(array); + * // => ['b', 'b'] + */ + function pullAll(array, values) { + return (array && array.length && values && values.length) + ? basePullAll(array, values) + : array; + } + + /** + * This method is like `_.pullAll` except that it accepts `iteratee` which is + * invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The iteratee is invoked with one argument: (value). + * + * **Note:** Unlike `_.differenceBy`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1 }, { 'x': 2 }, { 'x': 3 }, { 'x': 1 }]; + * + * _.pullAllBy(array, [{ 'x': 1 }, { 'x': 3 }], 'x'); + * console.log(array); + * // => [{ 'x': 2 }] + */ + function pullAllBy(array, values, iteratee) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, getIteratee(iteratee, 2)) + : array; + } + + /** + * This method is like `_.pullAll` except that it accepts `comparator` which + * is invoked to compare elements of `array` to `values`. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.differenceWith`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1, 'y': 2 }, { 'x': 3, 'y': 4 }, { 'x': 5, 'y': 6 }]; + * + * _.pullAllWith(array, [{ 'x': 3, 'y': 4 }], _.isEqual); + * console.log(array); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 5, 'y': 6 }] + */ + function pullAllWith(array, values, comparator) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, undefined, comparator) + : array; + } + + /** + * Removes elements from `array` corresponding to `indexes` and returns an + * array of removed elements. + * + * **Note:** Unlike `_.at`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...(number|number[])} [indexes] The indexes of elements to remove. + * @returns {Array} Returns the new array of removed elements. 
+ * @example + * + * var array = ['a', 'b', 'c', 'd']; + * var pulled = _.pullAt(array, [1, 3]); + * + * console.log(array); + * // => ['a', 'c'] + * + * console.log(pulled); + * // => ['b', 'd'] + */ + var pullAt = flatRest(function(array, indexes) { + var length = array == null ? 0 : array.length, + result = baseAt(array, indexes); + + basePullAt(array, arrayMap(indexes, function(index) { + return isIndex(index, length) ? +index : index; + }).sort(compareAscending)); + + return result; + }); + + /** + * Removes all elements from `array` that `predicate` returns truthy for + * and returns an array of the removed elements. The predicate is invoked + * with three arguments: (value, index, array). + * + * **Note:** Unlike `_.filter`, this method mutates `array`. Use `_.pull` + * to pull elements from an array by value. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new array of removed elements. + * @example + * + * var array = [1, 2, 3, 4]; + * var evens = _.remove(array, function(n) { + * return n % 2 == 0; + * }); + * + * console.log(array); + * // => [1, 3] + * + * console.log(evens); + * // => [2, 4] + */ + function remove(array, predicate) { + var result = []; + if (!(array && array.length)) { + return result; + } + var index = -1, + indexes = [], + length = array.length; + + predicate = getIteratee(predicate, 3); + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result.push(value); + indexes.push(index); + } + } + basePullAt(array, indexes); + return result; + } + + /** + * Reverses `array` so that the first element becomes the last, the second + * element becomes the second to last, and so on. + * + * **Note:** This method mutates `array` and is based on + * [`Array#reverse`](https://mdn.io/Array/reverse). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.reverse(array); + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function reverse(array) { + return array == null ? array : nativeReverse.call(array); + } + + /** + * Creates a slice of `array` from `start` up to, but not including, `end`. + * + * **Note:** This method is used instead of + * [`Array#slice`](https://mdn.io/Array/slice) to ensure dense arrays are + * returned. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function slice(array, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (end && typeof end != 'number' && isIterateeCall(array, start, end)) { + start = 0; + end = length; + } + else { + start = start == null ? 0 : toInteger(start); + end = end === undefined ? length : toInteger(end); + } + return baseSlice(array, start, end); + } + + /** + * Uses a binary search to determine the lowest index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The sorted array to inspect. 
+ * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedIndex([30, 50], 40); + * // => 1 + */ + function sortedIndex(array, value) { + return baseSortedIndex(array, value); + } + + /** + * This method is like `_.sortedIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.sortedIndexBy(objects, { 'x': 4 }, 'x'); + * // => 0 + */ + function sortedIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2)); + } + + /** + * This method is like `_.indexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedIndexOf([4, 5, 5, 5, 6], 5); + * // => 1 + */ + function sortedIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value); + if (index < length && eq(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * This method is like `_.sortedIndex` except that it returns the highest + * index at which `value` should be inserted into `array` in order to + * maintain its sort order. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedLastIndex([4, 5, 5, 5, 6], 5); + * // => 4 + */ + function sortedLastIndex(array, value) { + return baseSortedIndex(array, value, true); + } + + /** + * This method is like `_.sortedLastIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedLastIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 1 + * + * // The `_.property` iteratee shorthand. 
+ * _.sortedLastIndexBy(objects, { 'x': 4 }, 'x'); + * // => 1 + */ + function sortedLastIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2), true); + } + + /** + * This method is like `_.lastIndexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedLastIndexOf([4, 5, 5, 5, 6], 5); + * // => 3 + */ + function sortedLastIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value, true) - 1; + if (eq(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * This method is like `_.uniq` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniq([1, 1, 2]); + * // => [1, 2] + */ + function sortedUniq(array) { + return (array && array.length) + ? baseSortedUniq(array) + : []; + } + + /** + * This method is like `_.uniqBy` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniqBy([1.1, 1.2, 2.3, 2.4], Math.floor); + * // => [1.1, 2.3] + */ + function sortedUniqBy(array, iteratee) { + return (array && array.length) + ? baseSortedUniq(array, getIteratee(iteratee, 2)) + : []; + } + + /** + * Gets all but the first element of `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.tail([1, 2, 3]); + * // => [2, 3] + */ + function tail(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 1, length) : []; + } + + /** + * Creates a slice of `array` with `n` elements taken from the beginning. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.take([1, 2, 3]); + * // => [1] + * + * _.take([1, 2, 3], 2); + * // => [1, 2] + * + * _.take([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.take([1, 2, 3], 0); + * // => [] + */ + function take(array, n, guard) { + if (!(array && array.length)) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, 0, n < 0 ? 0 : n); + } + + /** + * Creates a slice of `array` with `n` elements taken from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. 
+ * @example + * + * _.takeRight([1, 2, 3]); + * // => [3] + * + * _.takeRight([1, 2, 3], 2); + * // => [2, 3] + * + * _.takeRight([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.takeRight([1, 2, 3], 0); + * // => [] + */ + function takeRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with elements taken from the end. Elements are + * taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.takeRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.takeRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeRightWhile(users, ['active', false]); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.takeRightWhile(users, 'active'); + * // => [] + */ + function takeRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), false, true) + : []; + } + + /** + * Creates a slice of `array` with elements taken from the beginning. Elements + * are taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.takeWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matches` iteratee shorthand. + * _.takeWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeWhile(users, ['active', false]); + * // => objects for ['barney', 'fred'] + * + * // The `_.property` iteratee shorthand. + * _.takeWhile(users, 'active'); + * // => [] + */ + function takeWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3)) + : []; + } + + /** + * Creates an array of unique values, in order, from all given arrays using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of combined values. 
+ * @example + * + * _.union([2], [1, 2]); + * // => [2, 1] + */ + var union = baseRest(function(arrays) { + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true)); + }); + + /** + * This method is like `_.union` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which uniqueness is computed. Result values are chosen from the first + * array in which the value occurs. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * _.unionBy([2.1], [1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. + * _.unionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + var unionBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)); + }); + + /** + * This method is like `_.union` except that it accepts `comparator` which + * is invoked to compare elements of `arrays`. Result values are chosen from + * the first array in which the value occurs. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.unionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var unionWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), undefined, comparator); + }); + + /** + * Creates a duplicate-free version of an array, using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons, in which only the first occurrence of each element + * is kept. The order of result values is determined by the order they occur + * in the array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.uniq([2, 1, 2]); + * // => [2, 1] + */ + function uniq(array) { + return (array && array.length) ? baseUniq(array) : []; + } + + /** + * This method is like `_.uniq` except that it accepts `iteratee` which is + * invoked for each element in `array` to generate the criterion by which + * uniqueness is computed. The order of result values is determined by the + * order they occur in the array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. 
+ * @example + * + * _.uniqBy([2.1, 1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. + * _.uniqBy([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + function uniqBy(array, iteratee) { + return (array && array.length) ? baseUniq(array, getIteratee(iteratee, 2)) : []; + } + + /** + * This method is like `_.uniq` except that it accepts `comparator` which + * is invoked to compare elements of `array`. The order of result values is + * determined by the order they occur in the array. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.uniqWith(objects, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }] + */ + function uniqWith(array, comparator) { + comparator = typeof comparator == 'function' ? comparator : undefined; + return (array && array.length) ? baseUniq(array, undefined, comparator) : []; + } + + /** + * This method is like `_.zip` except that it accepts an array of grouped + * elements and creates an array regrouping the elements to their pre-zip + * configuration. + * + * @static + * @memberOf _ + * @since 1.2.0 + * @category Array + * @param {Array} array The array of grouped elements to process. + * @returns {Array} Returns the new array of regrouped elements. + * @example + * + * var zipped = _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + * + * _.unzip(zipped); + * // => [['a', 'b'], [1, 2], [true, false]] + */ + function unzip(array) { + if (!(array && array.length)) { + return []; + } + var length = 0; + array = arrayFilter(array, function(group) { + if (isArrayLikeObject(group)) { + length = nativeMax(group.length, length); + return true; + } + }); + return baseTimes(length, function(index) { + return arrayMap(array, baseProperty(index)); + }); + } + + /** + * This method is like `_.unzip` except that it accepts `iteratee` to specify + * how regrouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {Array} array The array of grouped elements to process. + * @param {Function} [iteratee=_.identity] The function to combine + * regrouped values. + * @returns {Array} Returns the new array of regrouped elements. + * @example + * + * var zipped = _.zip([1, 2], [10, 20], [100, 200]); + * // => [[1, 10, 100], [2, 20, 200]] + * + * _.unzipWith(zipped, _.add); + * // => [3, 30, 300] + */ + function unzipWith(array, iteratee) { + if (!(array && array.length)) { + return []; + } + var result = unzip(array); + if (iteratee == null) { + return result; + } + return arrayMap(result, function(group) { + return apply(iteratee, undefined, group); + }); + } + + /** + * Creates an array excluding all given values using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.pull`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...*} [values] The values to exclude. 
+ * @returns {Array} Returns the new array of filtered values. + * @see _.difference, _.xor + * @example + * + * _.without([2, 1, 2, 3], 1, 2); + * // => [3] + */ + var without = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, values) + : []; + }); + + /** + * Creates an array of unique values that is the + * [symmetric difference](https://en.wikipedia.org/wiki/Symmetric_difference) + * of the given arrays. The order of result values is determined by the order + * they occur in the arrays. + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of filtered values. + * @see _.difference, _.without + * @example + * + * _.xor([2, 1], [2, 3]); + * // => [1, 3] + */ + var xor = baseRest(function(arrays) { + return baseXor(arrayFilter(arrays, isArrayLikeObject)); + }); + + /** + * This method is like `_.xor` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which they're compared. The order of result values is determined + * by the order they occur in the arrays. The iteratee is invoked with one + * argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.xorBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2, 3.4] + * + * // The `_.property` iteratee shorthand. + * _.xorBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var xorBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseXor(arrayFilter(arrays, isArrayLikeObject), getIteratee(iteratee, 2)); + }); + + /** + * This method is like `_.xor` except that it accepts `comparator` which is + * invoked to compare elements of `arrays`. The order of result values is + * determined by the order they occur in the arrays. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.xorWith(objects, others, _.isEqual); + * // => [{ 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var xorWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseXor(arrayFilter(arrays, isArrayLikeObject), undefined, comparator); + }); + + /** + * Creates an array of grouped elements, the first of which contains the + * first elements of the given arrays, the second of which contains the + * second elements of the given arrays, and so on. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @returns {Array} Returns the new array of grouped elements. 
+ * @example + * + * _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + */ + var zip = baseRest(unzip); + + /** + * This method is like `_.fromPairs` except that it accepts two arrays, + * one of property identifiers and one of corresponding values. + * + * @static + * @memberOf _ + * @since 0.4.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObject(['a', 'b'], [1, 2]); + * // => { 'a': 1, 'b': 2 } + */ + function zipObject(props, values) { + return baseZipObject(props || [], values || [], assignValue); + } + + /** + * This method is like `_.zipObject` except that it supports property paths. + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObjectDeep(['a.b[0].c', 'a.b[1].d'], [1, 2]); + * // => { 'a': { 'b': [{ 'c': 1 }, { 'd': 2 }] } } + */ + function zipObjectDeep(props, values) { + return baseZipObject(props || [], values || [], baseSet); + } + + /** + * This method is like `_.zip` except that it accepts `iteratee` to specify + * how grouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @param {Function} [iteratee=_.identity] The function to combine + * grouped values. + * @returns {Array} Returns the new array of grouped elements. + * @example + * + * _.zipWith([1, 2], [10, 20], [100, 200], function(a, b, c) { + * return a + b + c; + * }); + * // => [111, 222] + */ + var zipWith = baseRest(function(arrays) { + var length = arrays.length, + iteratee = length > 1 ? arrays[length - 1] : undefined; + + iteratee = typeof iteratee == 'function' ? (arrays.pop(), iteratee) : undefined; + return unzipWith(arrays, iteratee); + }); + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` wrapper instance that wraps `value` with explicit method + * chain sequences enabled. The result of such sequences must be unwrapped + * with `_#value`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Seq + * @param {*} value The value to wrap. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _ + * .chain(users) + * .sortBy('age') + * .map(function(o) { + * return o.user + ' is ' + o.age; + * }) + * .head() + * .value(); + * // => 'pebbles is 1' + */ + function chain(value) { + var result = lodash(value); + result.__chain__ = true; + return result; + } + + /** + * This method invokes `interceptor` and returns `value`. The interceptor + * is invoked with one argument; (value). The purpose of this method is to + * "tap into" a method chain sequence in order to modify intermediate results. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns `value`. + * @example + * + * _([1, 2, 3]) + * .tap(function(array) { + * // Mutate input array. 
+ * array.pop(); + * }) + * .reverse() + * .value(); + * // => [2, 1] + */ + function tap(value, interceptor) { + interceptor(value); + return value; + } + + /** + * This method is like `_.tap` except that it returns the result of `interceptor`. + * The purpose of this method is to "pass thru" values replacing intermediate + * results in a method chain sequence. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns the result of `interceptor`. + * @example + * + * _(' abc ') + * .chain() + * .trim() + * .thru(function(value) { + * return [value]; + * }) + * .value(); + * // => ['abc'] + */ + function thru(value, interceptor) { + return interceptor(value); + } + + /** + * This method is the wrapper version of `_.at`. + * + * @name at + * @memberOf _ + * @since 1.0.0 + * @category Seq + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _(object).at(['a[0].b.c', 'a[1]']).value(); + * // => [3, 4] + */ + var wrapperAt = flatRest(function(paths) { + var length = paths.length, + start = length ? paths[0] : 0, + value = this.__wrapped__, + interceptor = function(object) { return baseAt(object, paths); }; + + if (length > 1 || this.__actions__.length || + !(value instanceof LazyWrapper) || !isIndex(start)) { + return this.thru(interceptor); + } + value = value.slice(start, +start + (length ? 1 : 0)); + value.__actions__.push({ + 'func': thru, + 'args': [interceptor], + 'thisArg': undefined + }); + return new LodashWrapper(value, this.__chain__).thru(function(array) { + if (length && !array.length) { + array.push(undefined); + } + return array; + }); + }); + + /** + * Creates a `lodash` wrapper instance with explicit method chain sequences enabled. + * + * @name chain + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 } + * ]; + * + * // A sequence without explicit chaining. + * _(users).head(); + * // => { 'user': 'barney', 'age': 36 } + * + * // A sequence with explicit chaining. + * _(users) + * .chain() + * .head() + * .pick('user') + * .value(); + * // => { 'user': 'barney' } + */ + function wrapperChain() { + return chain(this); + } + + /** + * Executes the chain sequence and returns the wrapped result. + * + * @name commit + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2]; + * var wrapped = _(array).push(3); + * + * console.log(array); + * // => [1, 2] + * + * wrapped = wrapped.commit(); + * console.log(array); + * // => [1, 2, 3] + * + * wrapped.last(); + * // => 3 + * + * console.log(array); + * // => [1, 2, 3] + */ + function wrapperCommit() { + return new LodashWrapper(this.value(), this.__chain__); + } + + /** + * Gets the next value on a wrapped object following the + * [iterator protocol](https://mdn.io/iteration_protocols#iterator). + * + * @name next + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the next iterator value. 
+ * @example + * + * var wrapped = _([1, 2]); + * + * wrapped.next(); + * // => { 'done': false, 'value': 1 } + * + * wrapped.next(); + * // => { 'done': false, 'value': 2 } + * + * wrapped.next(); + * // => { 'done': true, 'value': undefined } + */ + function wrapperNext() { + if (this.__values__ === undefined) { + this.__values__ = toArray(this.value()); + } + var done = this.__index__ >= this.__values__.length, + value = done ? undefined : this.__values__[this.__index__++]; + + return { 'done': done, 'value': value }; + } + + /** + * Enables the wrapper to be iterable. + * + * @name Symbol.iterator + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the wrapper object. + * @example + * + * var wrapped = _([1, 2]); + * + * wrapped[Symbol.iterator]() === wrapped; + * // => true + * + * Array.from(wrapped); + * // => [1, 2] + */ + function wrapperToIterator() { + return this; + } + + /** + * Creates a clone of the chain sequence planting `value` as the wrapped value. + * + * @name plant + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @param {*} value The value to plant. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2]).map(square); + * var other = wrapped.plant([3, 4]); + * + * other.value(); + * // => [9, 16] + * + * wrapped.value(); + * // => [1, 4] + */ + function wrapperPlant(value) { + var result, + parent = this; + + while (parent instanceof baseLodash) { + var clone = wrapperClone(parent); + clone.__index__ = 0; + clone.__values__ = undefined; + if (result) { + previous.__wrapped__ = clone; + } else { + result = clone; + } + var previous = clone; + parent = parent.__wrapped__; + } + previous.__wrapped__ = value; + return result; + } + + /** + * This method is the wrapper version of `_.reverse`. + * + * **Note:** This method mutates the wrapped array. + * + * @name reverse + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2, 3]; + * + * _(array).reverse().value() + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function wrapperReverse() { + var value = this.__wrapped__; + if (value instanceof LazyWrapper) { + var wrapped = value; + if (this.__actions__.length) { + wrapped = new LazyWrapper(this); + } + wrapped = wrapped.reverse(); + wrapped.__actions__.push({ + 'func': thru, + 'args': [reverse], + 'thisArg': undefined + }); + return new LodashWrapper(wrapped, this.__chain__); + } + return this.thru(reverse); + } + + /** + * Executes the chain sequence to resolve the unwrapped value. + * + * @name value + * @memberOf _ + * @since 0.1.0 + * @alias toJSON, valueOf + * @category Seq + * @returns {*} Returns the resolved unwrapped value. + * @example + * + * _([1, 2, 3]).value(); + * // => [1, 2, 3] + */ + function wrapperValue() { + return baseWrapperValue(this.__wrapped__, this.__actions__); + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the number of times the key was returned by `iteratee`. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.countBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': 1, '6': 2 } + * + * // The `_.property` iteratee shorthand. + * _.countBy(['one', 'two', 'three'], 'length'); + * // => { '3': 2, '5': 1 } + */ + var countBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + ++result[key]; + } else { + baseAssignValue(result, key, 1); + } + }); + + /** + * Checks if `predicate` returns truthy for **all** elements of `collection`. + * Iteration is stopped once `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * **Note:** This method returns `true` for + * [empty collections](https://en.wikipedia.org/wiki/Empty_set) because + * [everything is true](https://en.wikipedia.org/wiki/Vacuous_truth) of + * elements of empty collections. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + * @example + * + * _.every([true, 1, null, 'yes'], Boolean); + * // => false + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.every(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.every(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.every(users, 'active'); + * // => false + */ + function every(collection, predicate, guard) { + var func = isArray(collection) ? arrayEvery : baseEvery; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Iterates over elements of `collection`, returning an array of all elements + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * **Note:** Unlike `_.remove`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.reject + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * _.filter(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, { 'age': 36, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.filter(users, 'active'); + * // => objects for ['barney'] + */ + function filter(collection, predicate) { + var func = isArray(collection) ? 
arrayFilter : baseFilter; + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Iterates over elements of `collection`, returning the first element + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false }, + * { 'user': 'pebbles', 'age': 1, 'active': true } + * ]; + * + * _.find(users, function(o) { return o.age < 40; }); + * // => object for 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.find(users, { 'age': 1, 'active': true }); + * // => object for 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.find(users, ['active', false]); + * // => object for 'fred' + * + * // The `_.property` iteratee shorthand. + * _.find(users, 'active'); + * // => object for 'barney' + */ + var find = createFind(findIndex); + + /** + * This method is like `_.find` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=collection.length-1] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * _.findLast([1, 2, 3, 4], function(n) { + * return n % 2 == 1; + * }); + * // => 3 + */ + var findLast = createFind(findLastIndex); + + /** + * Creates a flattened array of values by running each element in `collection` + * thru `iteratee` and flattening the mapped results. The iteratee is invoked + * with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [n, n]; + * } + * + * _.flatMap([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMap(collection, iteratee) { + return baseFlatten(map(collection, iteratee), 1); + } + + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDeep([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMapDeep(collection, iteratee) { + return baseFlatten(map(collection, iteratee), INFINITY); + } + + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results up to `depth` times. 
+ * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDepth([1, 2], duplicate, 2); + * // => [[1, 1], [2, 2]] + */ + function flatMapDepth(collection, iteratee, depth) { + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(map(collection, iteratee), depth); + } + + /** + * Iterates over elements of `collection` and invokes `iteratee` for each element. + * The iteratee is invoked with three arguments: (value, index|key, collection). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * **Note:** As with other "Collections" methods, objects with a "length" + * property are iterated like arrays. To avoid this behavior use `_.forIn` + * or `_.forOwn` for object iteration. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias each + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEachRight + * @example + * + * _.forEach([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `1` then `2`. + * + * _.forEach({ 'a': 1, 'b': 2 }, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forEach(collection, iteratee) { + var func = isArray(collection) ? arrayEach : baseEach; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.forEach` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @alias eachRight + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEach + * @example + * + * _.forEachRight([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `2` then `1`. + */ + function forEachRight(collection, iteratee) { + var func = isArray(collection) ? arrayEachRight : baseEachRight; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The order of grouped values + * is determined by the order they occur in `collection`. The corresponding + * value of each key is an array of elements responsible for generating the + * key. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.groupBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': [4.2], '6': [6.1, 6.3] } + * + * // The `_.property` iteratee shorthand. 
+ * _.groupBy(['one', 'two', 'three'], 'length'); + * // => { '3': ['one', 'two'], '5': ['three'] } + */ + var groupBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + result[key].push(value); + } else { + baseAssignValue(result, key, [value]); + } + }); + + /** + * Checks if `value` is in `collection`. If `collection` is a string, it's + * checked for a substring of `value`, otherwise + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * is used for equality comparisons. If `fromIndex` is negative, it's used as + * the offset from the end of `collection`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + * @example + * + * _.includes([1, 2, 3], 1); + * // => true + * + * _.includes([1, 2, 3], 1, 2); + * // => false + * + * _.includes({ 'a': 1, 'b': 2 }, 1); + * // => true + * + * _.includes('abcd', 'bc'); + * // => true + */ + function includes(collection, value, fromIndex, guard) { + collection = isArrayLike(collection) ? collection : values(collection); + fromIndex = (fromIndex && !guard) ? toInteger(fromIndex) : 0; + + var length = collection.length; + if (fromIndex < 0) { + fromIndex = nativeMax(length + fromIndex, 0); + } + return isString(collection) + ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1) + : (!!length && baseIndexOf(collection, value, fromIndex) > -1); + } + + /** + * Invokes the method at `path` of each element in `collection`, returning + * an array of the results of each invoked method. Any additional arguments + * are provided to each invoked method. If `path` is a function, it's invoked + * for, and `this` bound to, each element in `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array|Function|string} path The path of the method to invoke or + * the function invoked per iteration. + * @param {...*} [args] The arguments to invoke each method with. + * @returns {Array} Returns the array of results. + * @example + * + * _.invokeMap([[5, 1, 7], [3, 2, 1]], 'sort'); + * // => [[1, 5, 7], [1, 2, 3]] + * + * _.invokeMap([123, 456], String.prototype.split, ''); + * // => [['1', '2', '3'], ['4', '5', '6']] + */ + var invokeMap = baseRest(function(collection, path, args) { + var index = -1, + isFunc = typeof path == 'function', + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value) { + result[++index] = isFunc ? apply(path, value, args) : baseInvoke(value, path, args); + }); + return result; + }); + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the last element responsible for generating the key. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. 
+ * @returns {Object} Returns the composed aggregate object. + * @example + * + * var array = [ + * { 'dir': 'left', 'code': 97 }, + * { 'dir': 'right', 'code': 100 } + * ]; + * + * _.keyBy(array, function(o) { + * return String.fromCharCode(o.code); + * }); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + * + * _.keyBy(array, 'dir'); + * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } + */ + var keyBy = createAggregator(function(result, value, key) { + baseAssignValue(result, key, value); + }); + + /** + * Creates an array of values by running each element in `collection` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.every`, `_.filter`, `_.map`, `_.mapValues`, `_.reject`, and `_.some`. + * + * The guarded methods are: + * `ary`, `chunk`, `curry`, `curryRight`, `drop`, `dropRight`, `every`, + * `fill`, `invert`, `parseInt`, `random`, `range`, `rangeRight`, `repeat`, + * `sampleSize`, `slice`, `some`, `sortBy`, `split`, `take`, `takeRight`, + * `template`, `trim`, `trimEnd`, `trimStart`, and `words` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + * @example + * + * function square(n) { + * return n * n; + * } + * + * _.map([4, 8], square); + * // => [16, 64] + * + * _.map({ 'a': 4, 'b': 8 }, square); + * // => [16, 64] (iteration order is not guaranteed) + * + * var users = [ + * { 'user': 'barney' }, + * { 'user': 'fred' } + * ]; + * + * // The `_.property` iteratee shorthand. + * _.map(users, 'user'); + * // => ['barney', 'fred'] + */ + function map(collection, iteratee) { + var func = isArray(collection) ? arrayMap : baseMap; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.sortBy` except that it allows specifying the sort + * orders of the iteratees to sort by. If `orders` is unspecified, all values + * are sorted in ascending order. Otherwise, specify an order of "desc" for + * descending or "asc" for ascending sort order of corresponding values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array[]|Function[]|Object[]|string[]} [iteratees=[_.identity]] + * The iteratees to sort by. + * @param {string[]} [orders] The sort orders of `iteratees`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {Array} Returns the new sorted array. + * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 34 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'barney', 'age': 36 } + * ]; + * + * // Sort by `user` in ascending order and by `age` in descending order. + * _.orderBy(users, ['user', 'age'], ['asc', 'desc']); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 40]] + */ + function orderBy(collection, iteratees, orders, guard) { + if (collection == null) { + return []; + } + if (!isArray(iteratees)) { + iteratees = iteratees == null ? [] : [iteratees]; + } + orders = guard ? undefined : orders; + if (!isArray(orders)) { + orders = orders == null ? 
[] : [orders]; + } + return baseOrderBy(collection, iteratees, orders); + } + + /** + * Creates an array of elements split into two groups, the first of which + * contains elements `predicate` returns truthy for, the second of which + * contains elements `predicate` returns falsey for. The predicate is + * invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the array of grouped elements. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true }, + * { 'user': 'pebbles', 'age': 1, 'active': false } + * ]; + * + * _.partition(users, function(o) { return o.active; }); + * // => objects for [['fred'], ['barney', 'pebbles']] + * + * // The `_.matches` iteratee shorthand. + * _.partition(users, { 'age': 1, 'active': false }); + * // => objects for [['pebbles'], ['barney', 'fred']] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.partition(users, ['active', false]); + * // => objects for [['barney', 'pebbles'], ['fred']] + * + * // The `_.property` iteratee shorthand. + * _.partition(users, 'active'); + * // => objects for [['fred'], ['barney', 'pebbles']] + */ + var partition = createAggregator(function(result, value, key) { + result[key ? 0 : 1].push(value); + }, function() { return [[], []]; }); + + /** + * Reduces `collection` to a value which is the accumulated result of running + * each element in `collection` thru `iteratee`, where each successive + * invocation is supplied the return value of the previous. If `accumulator` + * is not given, the first element of `collection` is used as the initial + * value. The iteratee is invoked with four arguments: + * (accumulator, value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.reduce`, `_.reduceRight`, and `_.transform`. + * + * The guarded methods are: + * `assign`, `defaults`, `defaultsDeep`, `includes`, `merge`, `orderBy`, + * and `sortBy` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduceRight + * @example + * + * _.reduce([1, 2], function(sum, n) { + * return sum + n; + * }, 0); + * // => 3 + * + * _.reduce({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * return result; + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } (iteration order is not guaranteed) + */ + function reduce(collection, iteratee, accumulator) { + var func = isArray(collection) ? arrayReduce : baseReduce, + initAccum = arguments.length < 3; + + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEach); + } + + /** + * This method is like `_.reduce` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. 
+ * @returns {*} Returns the accumulated value. + * @see _.reduce + * @example + * + * var array = [[0, 1], [2, 3], [4, 5]]; + * + * _.reduceRight(array, function(flattened, other) { + * return flattened.concat(other); + * }, []); + * // => [4, 5, 2, 3, 0, 1] + */ + function reduceRight(collection, iteratee, accumulator) { + var func = isArray(collection) ? arrayReduceRight : baseReduce, + initAccum = arguments.length < 3; + + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEachRight); + } + + /** + * The opposite of `_.filter`; this method returns the elements of `collection` + * that `predicate` does **not** return truthy for. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.filter + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true } + * ]; + * + * _.reject(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.reject(users, { 'age': 40, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.reject(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.reject(users, 'active'); + * // => objects for ['barney'] + */ + function reject(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, negate(getIteratee(predicate, 3))); + } + + /** + * Gets a random element from `collection`. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + * @example + * + * _.sample([1, 2, 3, 4]); + * // => 2 + */ + function sample(collection) { + var func = isArray(collection) ? arraySample : baseSample; + return func(collection); + } + + /** + * Gets `n` random elements at unique keys from `collection` up to the + * size of `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @param {number} [n=1] The number of elements to sample. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the random elements. + * @example + * + * _.sampleSize([1, 2, 3], 2); + * // => [3, 1] + * + * _.sampleSize([1, 2, 3], 4); + * // => [2, 3, 1] + */ + function sampleSize(collection, n, guard) { + if ((guard ? isIterateeCall(collection, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); + } + var func = isArray(collection) ? arraySampleSize : baseSampleSize; + return func(collection, n); + } + + /** + * Creates an array of shuffled values, using a version of the + * [Fisher-Yates shuffle](https://en.wikipedia.org/wiki/Fisher-Yates_shuffle). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + * @example + * + * _.shuffle([1, 2, 3, 4]); + * // => [4, 1, 3, 2] + */ + function shuffle(collection) { + var func = isArray(collection) ? 
arrayShuffle : baseShuffle; + return func(collection); + } + + /** + * Gets the size of `collection` by returning its length for array-like + * values or the number of own enumerable string keyed properties for objects. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @returns {number} Returns the collection size. + * @example + * + * _.size([1, 2, 3]); + * // => 3 + * + * _.size({ 'a': 1, 'b': 2 }); + * // => 2 + * + * _.size('pebbles'); + * // => 7 + */ + function size(collection) { + if (collection == null) { + return 0; + } + if (isArrayLike(collection)) { + return isString(collection) ? stringSize(collection) : collection.length; + } + var tag = getTag(collection); + if (tag == mapTag || tag == setTag) { + return collection.size; + } + return baseKeys(collection).length; + } + + /** + * Checks if `predicate` returns truthy for **any** element of `collection`. + * Iteration is stopped once `predicate` returns truthy. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + * @example + * + * _.some([null, 0, 'yes', false], Boolean); + * // => true + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.some(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.some(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.some(users, 'active'); + * // => true + */ + function some(collection, predicate, guard) { + var func = isArray(collection) ? arraySome : baseSome; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Creates an array of elements, sorted in ascending order by the results of + * running each element in a collection thru each iteratee. This method + * performs a stable sort, that is, it preserves the original sort order of + * equal elements. The iteratees are invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {...(Function|Function[])} [iteratees=[_.identity]] + * The iteratees to sort by. + * @returns {Array} Returns the new sorted array. 
+ * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'barney', 'age': 34 } + * ]; + * + * _.sortBy(users, [function(o) { return o.user; }]); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 40]] + * + * _.sortBy(users, ['user', 'age']); + * // => objects for [['barney', 34], ['barney', 36], ['fred', 40], ['fred', 48]] + */ + var sortBy = baseRest(function(collection, iteratees) { + if (collection == null) { + return []; + } + var length = iteratees.length; + if (length > 1 && isIterateeCall(collection, iteratees[0], iteratees[1])) { + iteratees = []; + } else if (length > 2 && isIterateeCall(iteratees[0], iteratees[1], iteratees[2])) { + iteratees = [iteratees[0]]; + } + return baseOrderBy(collection, baseFlatten(iteratees, 1), []); + }); + + /*------------------------------------------------------------------------*/ + + /** + * Gets the timestamp of the number of milliseconds that have elapsed since + * the Unix epoch (1 January 1970 00:00:00 UTC). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Date + * @returns {number} Returns the timestamp. + * @example + * + * _.defer(function(stamp) { + * console.log(_.now() - stamp); + * }, _.now()); + * // => Logs the number of milliseconds it took for the deferred invocation. + */ + var now = ctxNow || function() { + return root.Date.now(); + }; + + /*------------------------------------------------------------------------*/ + + /** + * The opposite of `_.before`; this method creates a function that invokes + * `func` once it's called `n` or more times. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {number} n The number of calls before `func` is invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var saves = ['profile', 'settings']; + * + * var done = _.after(saves.length, function() { + * console.log('done saving!'); + * }); + * + * _.forEach(saves, function(type) { + * asyncSave({ 'type': type, 'complete': done }); + * }); + * // => Logs 'done saving!' after the two async saves have completed. + */ + function after(n, func) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n < 1) { + return func.apply(this, arguments); + } + }; + } + + /** + * Creates a function that invokes `func`, with up to `n` arguments, + * ignoring any additional arguments. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. + * @param {number} [n=func.length] The arity cap. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.ary(parseInt, 1)); + * // => [6, 8, 10] + */ + function ary(func, n, guard) { + n = guard ? undefined : n; + n = (func && n == null) ? func.length : n; + return createWrap(func, WRAP_ARY_FLAG, undefined, undefined, undefined, undefined, n); + } + + /** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. + */ + function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; + } + + /** + * Creates a function that invokes `func` with the `this` binding of `thisArg` + * and `partials` prepended to the arguments it receives. + * + * The `_.bind.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for partially applied arguments. + * + * **Note:** Unlike native `Function#bind`, this method doesn't set the "length" + * property of bound functions. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to bind. + * @param {*} thisArg The `this` binding of `func`. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * function greet(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * + * var object = { 'user': 'fred' }; + * + * var bound = _.bind(greet, object, 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * // Bound with placeholders. + * var bound = _.bind(greet, object, _, '!'); + * bound('hi'); + * // => 'hi fred!' + */ + var bind = baseRest(function(func, thisArg, partials) { + var bitmask = WRAP_BIND_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bind)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(func, bitmask, thisArg, partials, holders); + }); + + /** + * Creates a function that invokes the method at `object[key]` with `partials` + * prepended to the arguments it receives. + * + * This method differs from `_.bind` by allowing bound functions to reference + * methods that may be redefined or don't yet exist. See + * [Peter Michaux's article](http://peter.michaux.ca/articles/lazy-function-definition-pattern) + * for more details. + * + * The `_.bindKey.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Function + * @param {Object} object The object to invoke the method on. + * @param {string} key The key of the method. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var object = { + * 'user': 'fred', + * 'greet': function(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * }; + * + * var bound = _.bindKey(object, 'greet', 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * object.greet = function(greeting, punctuation) { + * return greeting + 'ya ' + this.user + punctuation; + * }; + * + * bound('!'); + * // => 'hiya fred!' + * + * // Bound with placeholders. + * var bound = _.bindKey(object, 'greet', _, '!'); + * bound('hi'); + * // => 'hiya fred!' 
+ */ + var bindKey = baseRest(function(object, key, partials) { + var bitmask = WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bindKey)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(key, bitmask, object, partials, holders); + }); + + /** + * Creates a function that accepts arguments of `func` and either invokes + * `func` returning its result, if at least `arity` number of arguments have + * been provided, or returns a function that accepts the remaining `func` + * arguments, and so on. The arity of `func` may be specified if `func.length` + * is not sufficient. + * + * The `_.curry.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curry(abc); + * + * curried(1)(2)(3); + * // => [1, 2, 3] + * + * curried(1, 2)(3); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(1)(_, 3)(2); + * // => [1, 2, 3] + */ + function curry(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curry.placeholder; + return result; + } + + /** + * This method is like `_.curry` except that arguments are applied to `func` + * in the manner of `_.partialRight` instead of `_.partial`. + * + * The `_.curryRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curryRight(abc); + * + * curried(3)(2)(1); + * // => [1, 2, 3] + * + * curried(2, 3)(1); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(3)(1, _)(2); + * // => [1, 2, 3] + */ + function curryRight(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_RIGHT_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curryRight.placeholder; + return result; + } + + /** + * Creates a debounced function that delays invoking `func` until after `wait` + * milliseconds have elapsed since the last time the debounced function was + * invoked. The debounced function comes with a `cancel` method to cancel + * delayed `func` invocations and a `flush` method to immediately invoke them. 
+ * Provide `options` to indicate whether `func` should be invoked on the + * leading and/or trailing edge of the `wait` timeout. The `func` is invoked + * with the last arguments provided to the debounced function. Subsequent + * calls to the debounced function return the result of the last `func` + * invocation. + * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the debounced function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.debounce` and `_.throttle`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to debounce. + * @param {number} [wait=0] The number of milliseconds to delay. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=false] + * Specify invoking on the leading edge of the timeout. + * @param {number} [options.maxWait] + * The maximum time `func` is allowed to be delayed before it's invoked. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new debounced function. + * @example + * + * // Avoid costly calculations while the window size is in flux. + * jQuery(window).on('resize', _.debounce(calculateLayout, 150)); + * + * // Invoke `sendMail` when clicked, debouncing subsequent calls. + * jQuery(element).on('click', _.debounce(sendMail, 300, { + * 'leading': true, + * 'trailing': false + * })); + * + * // Ensure `batchLog` is invoked once after 1 second of debounced calls. + * var debounced = _.debounce(batchLog, 250, { 'maxWait': 1000 }); + * var source = new EventSource('/stream'); + * jQuery(source).on('message', debounced); + * + * // Cancel the trailing debounced invocation. + * jQuery(window).on('popstate', debounced.cancel); + */ + function debounce(func, wait, options) { + var lastArgs, + lastThis, + maxWait, + result, + timerId, + lastCallTime, + lastInvokeTime = 0, + leading = false, + maxing = false, + trailing = true; + + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + wait = toNumber(wait) || 0; + if (isObject(options)) { + leading = !!options.leading; + maxing = 'maxWait' in options; + maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait; + trailing = 'trailing' in options ? !!options.trailing : trailing; + } + + function invokeFunc(time) { + var args = lastArgs, + thisArg = lastThis; + + lastArgs = lastThis = undefined; + lastInvokeTime = time; + result = func.apply(thisArg, args); + return result; + } + + function leadingEdge(time) { + // Reset any `maxWait` timer. + lastInvokeTime = time; + // Start the timer for the trailing edge. + timerId = setTimeout(timerExpired, wait); + // Invoke the leading edge. + return leading ? invokeFunc(time) : result; + } + + function remainingWait(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime, + timeWaiting = wait - timeSinceLastCall; + + return maxing + ? 
nativeMin(timeWaiting, maxWait - timeSinceLastInvoke) + : timeWaiting; + } + + function shouldInvoke(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime; + + // Either this is the first call, activity has stopped and we're at the + // trailing edge, the system time has gone backwards and we're treating + // it as the trailing edge, or we've hit the `maxWait` limit. + return (lastCallTime === undefined || (timeSinceLastCall >= wait) || + (timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait)); + } + + function timerExpired() { + var time = now(); + if (shouldInvoke(time)) { + return trailingEdge(time); + } + // Restart the timer. + timerId = setTimeout(timerExpired, remainingWait(time)); + } + + function trailingEdge(time) { + timerId = undefined; + + // Only invoke if we have `lastArgs` which means `func` has been + // debounced at least once. + if (trailing && lastArgs) { + return invokeFunc(time); + } + lastArgs = lastThis = undefined; + return result; + } + + function cancel() { + if (timerId !== undefined) { + clearTimeout(timerId); + } + lastInvokeTime = 0; + lastArgs = lastCallTime = lastThis = timerId = undefined; + } + + function flush() { + return timerId === undefined ? result : trailingEdge(now()); + } + + function debounced() { + var time = now(), + isInvoking = shouldInvoke(time); + + lastArgs = arguments; + lastThis = this; + lastCallTime = time; + + if (isInvoking) { + if (timerId === undefined) { + return leadingEdge(lastCallTime); + } + if (maxing) { + // Handle invocations in a tight loop. + timerId = setTimeout(timerExpired, wait); + return invokeFunc(lastCallTime); + } + } + if (timerId === undefined) { + timerId = setTimeout(timerExpired, wait); + } + return result; + } + debounced.cancel = cancel; + debounced.flush = flush; + return debounced; + } + + /** + * Defers invoking the `func` until the current call stack has cleared. Any + * additional arguments are provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to defer. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.defer(function(text) { + * console.log(text); + * }, 'deferred'); + * // => Logs 'deferred' after one millisecond. + */ + var defer = baseRest(function(func, args) { + return baseDelay(func, 1, args); + }); + + /** + * Invokes `func` after `wait` milliseconds. Any additional arguments are + * provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.delay(function(text) { + * console.log(text); + * }, 1000, 'later'); + * // => Logs 'later' after one second. + */ + var delay = baseRest(function(func, wait, args) { + return baseDelay(func, toNumber(wait) || 0, args); + }); + + /** + * Creates a function that invokes `func` with arguments reversed. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to flip arguments for. + * @returns {Function} Returns the new flipped function. 
+ * @example + * + * var flipped = _.flip(function() { + * return _.toArray(arguments); + * }); + * + * flipped('a', 'b', 'c', 'd'); + * // => ['d', 'c', 'b', 'a'] + */ + function flip(func) { + return createWrap(func, WRAP_FLIP_FLAG); + } + + /** + * Creates a function that memoizes the result of `func`. If `resolver` is + * provided, it determines the cache key for storing the result based on the + * arguments provided to the memoized function. By default, the first argument + * provided to the memoized function is used as the map cache key. The `func` + * is invoked with the `this` binding of the memoized function. + * + * **Note:** The cache is exposed as the `cache` property on the memoized + * function. Its creation may be customized by replacing the `_.memoize.Cache` + * constructor with one whose instances implement the + * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) + * method interface of `clear`, `delete`, `get`, `has`, and `set`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to have its output memoized. + * @param {Function} [resolver] The function to resolve the cache key. + * @returns {Function} Returns the new memoized function. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * var other = { 'c': 3, 'd': 4 }; + * + * var values = _.memoize(_.values); + * values(object); + * // => [1, 2] + * + * values(other); + * // => [3, 4] + * + * object.a = 2; + * values(object); + * // => [1, 2] + * + * // Modify the result cache. + * values.cache.set(object, ['a', 'b']); + * values(object); + * // => ['a', 'b'] + * + * // Replace `_.memoize.Cache`. + * _.memoize.Cache = WeakMap; + */ + function memoize(func, resolver) { + if (typeof func != 'function' || (resolver != null && typeof resolver != 'function')) { + throw new TypeError(FUNC_ERROR_TEXT); + } + var memoized = function() { + var args = arguments, + key = resolver ? resolver.apply(this, args) : args[0], + cache = memoized.cache; + + if (cache.has(key)) { + return cache.get(key); + } + var result = func.apply(this, args); + memoized.cache = cache.set(key, result) || cache; + return result; + }; + memoized.cache = new (memoize.Cache || MapCache); + return memoized; + } + + // Expose `MapCache`. + memoize.Cache = MapCache; + + /** + * Creates a function that negates the result of the predicate `func`. The + * `func` predicate is invoked with the `this` binding and arguments of the + * created function. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} predicate The predicate to negate. + * @returns {Function} Returns the new negated function. + * @example + * + * function isEven(n) { + * return n % 2 == 0; + * } + * + * _.filter([1, 2, 3, 4, 5, 6], _.negate(isEven)); + * // => [1, 3, 5] + */ + function negate(predicate) { + if (typeof predicate != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return function() { + var args = arguments; + switch (args.length) { + case 0: return !predicate.call(this); + case 1: return !predicate.call(this, args[0]); + case 2: return !predicate.call(this, args[0], args[1]); + case 3: return !predicate.call(this, args[0], args[1], args[2]); + } + return !predicate.apply(this, args); + }; + } + + /** + * Creates a function that is restricted to invoking `func` once. Repeat calls + * to the function return the value of the first invocation. 
The `func` is + * invoked with the `this` binding and arguments of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var initialize = _.once(createApplication); + * initialize(); + * initialize(); + * // => `createApplication` is invoked once + */ + function once(func) { + return before(2, func); + } + + /** + * Creates a function that invokes `func` with its arguments transformed. + * + * @static + * @since 4.0.0 + * @memberOf _ + * @category Function + * @param {Function} func The function to wrap. + * @param {...(Function|Function[])} [transforms=[_.identity]] + * The argument transforms. + * @returns {Function} Returns the new function. + * @example + * + * function doubled(n) { + * return n * 2; + * } + * + * function square(n) { + * return n * n; + * } + * + * var func = _.overArgs(function(x, y) { + * return [x, y]; + * }, [square, doubled]); + * + * func(9, 3); + * // => [81, 6] + * + * func(10, 5); + * // => [100, 10] + */ + var overArgs = castRest(function(func, transforms) { + transforms = (transforms.length == 1 && isArray(transforms[0])) + ? arrayMap(transforms[0], baseUnary(getIteratee())) + : arrayMap(baseFlatten(transforms, 1), baseUnary(getIteratee())); + + var funcsLength = transforms.length; + return baseRest(function(args) { + var index = -1, + length = nativeMin(args.length, funcsLength); + + while (++index < length) { + args[index] = transforms[index].call(this, args[index]); + } + return apply(func, this, args); + }); + }); + + /** + * Creates a function that invokes `func` with `partials` prepended to the + * arguments it receives. This method is like `_.bind` except it does **not** + * alter the `this` binding. + * + * The `_.partial.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * **Note:** This method doesn't set the "length" property of partially + * applied functions. + * + * @static + * @memberOf _ + * @since 0.2.0 + * @category Function + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new partially applied function. + * @example + * + * function greet(greeting, name) { + * return greeting + ' ' + name; + * } + * + * var sayHelloTo = _.partial(greet, 'hello'); + * sayHelloTo('fred'); + * // => 'hello fred' + * + * // Partially applied with placeholders. + * var greetFred = _.partial(greet, _, 'fred'); + * greetFred('hi'); + * // => 'hi fred' + */ + var partial = baseRest(function(func, partials) { + var holders = replaceHolders(partials, getHolder(partial)); + return createWrap(func, WRAP_PARTIAL_FLAG, undefined, partials, holders); + }); + + /** + * This method is like `_.partial` except that partially applied arguments + * are appended to the arguments it receives. + * + * The `_.partialRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * **Note:** This method doesn't set the "length" property of partially + * applied functions. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Function + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [partials] The arguments to be partially applied. 
+ * @returns {Function} Returns the new partially applied function. + * @example + * + * function greet(greeting, name) { + * return greeting + ' ' + name; + * } + * + * var greetFred = _.partialRight(greet, 'fred'); + * greetFred('hi'); + * // => 'hi fred' + * + * // Partially applied with placeholders. + * var sayHelloTo = _.partialRight(greet, 'hello', _); + * sayHelloTo('fred'); + * // => 'hello fred' + */ + var partialRight = baseRest(function(func, partials) { + var holders = replaceHolders(partials, getHolder(partialRight)); + return createWrap(func, WRAP_PARTIAL_RIGHT_FLAG, undefined, partials, holders); + }); + + /** + * Creates a function that invokes `func` with arguments arranged according + * to the specified `indexes` where the argument value at the first index is + * provided as the first argument, the argument value at the second index is + * provided as the second argument, and so on. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to rearrange arguments for. + * @param {...(number|number[])} indexes The arranged argument indexes. + * @returns {Function} Returns the new function. + * @example + * + * var rearged = _.rearg(function(a, b, c) { + * return [a, b, c]; + * }, [2, 0, 1]); + * + * rearged('b', 'c', 'a') + * // => ['a', 'b', 'c'] + */ + var rearg = flatRest(function(func, indexes) { + return createWrap(func, WRAP_REARG_FLAG, undefined, undefined, undefined, indexes); + }); + + /** + * Creates a function that invokes `func` with the `this` binding of the + * created function and arguments from `start` and beyond provided as + * an array. + * + * **Note:** This method is based on the + * [rest parameter](https://mdn.io/rest_parameters). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + * @example + * + * var say = _.rest(function(what, names) { + * return what + ' ' + _.initial(names).join(', ') + + * (_.size(names) > 1 ? ', & ' : '') + _.last(names); + * }); + * + * say('hello', 'fred', 'barney', 'pebbles'); + * // => 'hello fred, barney, & pebbles' + */ + function rest(func, start) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + start = start === undefined ? start : toInteger(start); + return baseRest(func, start); + } + + /** + * Creates a function that invokes `func` with the `this` binding of the + * create function and an array of arguments much like + * [`Function#apply`](http://www.ecma-international.org/ecma-262/7.0/#sec-function.prototype.apply). + * + * **Note:** This method is based on the + * [spread operator](https://mdn.io/spread_operator). + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Function + * @param {Function} func The function to spread arguments over. + * @param {number} [start=0] The start position of the spread. + * @returns {Function} Returns the new function. 
+ * @example + * + * var say = _.spread(function(who, what) { + * return who + ' says ' + what; + * }); + * + * say(['fred', 'hello']); + * // => 'fred says hello' + * + * var numbers = Promise.all([ + * Promise.resolve(40), + * Promise.resolve(36) + * ]); + * + * numbers.then(_.spread(function(x, y) { + * return x + y; + * })); + * // => a Promise of 76 + */ + function spread(func, start) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + start = start == null ? 0 : nativeMax(toInteger(start), 0); + return baseRest(function(args) { + var array = args[start], + otherArgs = castSlice(args, 0, start); + + if (array) { + arrayPush(otherArgs, array); + } + return apply(func, this, otherArgs); + }); + } + + /** + * Creates a throttled function that only invokes `func` at most once per + * every `wait` milliseconds. The throttled function comes with a `cancel` + * method to cancel delayed `func` invocations and a `flush` method to + * immediately invoke them. Provide `options` to indicate whether `func` + * should be invoked on the leading and/or trailing edge of the `wait` + * timeout. The `func` is invoked with the last arguments provided to the + * throttled function. Subsequent calls to the throttled function return the + * result of the last `func` invocation. + * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the throttled function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.throttle` and `_.debounce`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to throttle. + * @param {number} [wait=0] The number of milliseconds to throttle invocations to. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=true] + * Specify invoking on the leading edge of the timeout. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new throttled function. + * @example + * + * // Avoid excessively updating the position while scrolling. + * jQuery(window).on('scroll', _.throttle(updatePosition, 100)); + * + * // Invoke `renewToken` when the click event is fired, but not more than once every 5 minutes. + * var throttled = _.throttle(renewToken, 300000, { 'trailing': false }); + * jQuery(element).on('click', throttled); + * + * // Cancel the trailing throttled invocation. + * jQuery(window).on('popstate', throttled.cancel); + */ + function throttle(func, wait, options) { + var leading = true, + trailing = true; + + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (isObject(options)) { + leading = 'leading' in options ? !!options.leading : leading; + trailing = 'trailing' in options ? !!options.trailing : trailing; + } + return debounce(func, wait, { + 'leading': leading, + 'maxWait': wait, + 'trailing': trailing + }); + } + + /** + * Creates a function that accepts up to one argument, ignoring any + * additional arguments. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. 
+ * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.unary(parseInt)); + * // => [6, 8, 10] + */ + function unary(func) { + return ary(func, 1); + } + + /** + * Creates a function that provides `value` to `wrapper` as its first + * argument. Any additional arguments provided to the function are appended + * to those provided to the `wrapper`. The wrapper is invoked with the `this` + * binding of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {*} value The value to wrap. + * @param {Function} [wrapper=identity] The wrapper function. + * @returns {Function} Returns the new function. + * @example + * + * var p = _.wrap(_.escape, function(func, text) { + * return '
<p>' + func(text) + '</p>'; + * }); + * + * p('fred, barney, & pebbles'); + * // => '<p>fred, barney, &amp; pebbles</p>
' + */ + function wrap(value, wrapper) { + return partial(castFunction(wrapper), value); + } + + /*------------------------------------------------------------------------*/ + + /** + * Casts `value` as an array if it's not one. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Lang + * @param {*} value The value to inspect. + * @returns {Array} Returns the cast array. + * @example + * + * _.castArray(1); + * // => [1] + * + * _.castArray({ 'a': 1 }); + * // => [{ 'a': 1 }] + * + * _.castArray('abc'); + * // => ['abc'] + * + * _.castArray(null); + * // => [null] + * + * _.castArray(undefined); + * // => [undefined] + * + * _.castArray(); + * // => [] + * + * var array = [1, 2, 3]; + * console.log(_.castArray(array) === array); + * // => true + */ + function castArray() { + if (!arguments.length) { + return []; + } + var value = arguments[0]; + return isArray(value) ? value : [value]; + } + + /** + * Creates a shallow clone of `value`. + * + * **Note:** This method is loosely based on the + * [structured clone algorithm](https://mdn.io/Structured_clone_algorithm) + * and supports cloning arrays, array buffers, booleans, date objects, maps, + * numbers, `Object` objects, regexes, sets, strings, symbols, and typed + * arrays. The own enumerable properties of `arguments` objects are cloned + * as plain objects. An empty object is returned for uncloneable values such + * as error objects, functions, DOM nodes, and WeakMaps. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to clone. + * @returns {*} Returns the cloned value. + * @see _.cloneDeep + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var shallow = _.clone(objects); + * console.log(shallow[0] === objects[0]); + * // => true + */ + function clone(value) { + return baseClone(value, CLONE_SYMBOLS_FLAG); + } + + /** + * This method is like `_.clone` except that it accepts `customizer` which + * is invoked to produce the cloned value. If `customizer` returns `undefined`, + * cloning is handled by the method instead. The `customizer` is invoked with + * up to four arguments; (value [, index|key, object, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the cloned value. + * @see _.cloneDeepWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(false); + * } + * } + * + * var el = _.cloneWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 0 + */ + function cloneWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_SYMBOLS_FLAG, customizer); + } + + /** + * This method is like `_.clone` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @returns {*} Returns the deep cloned value. 
+ * @see _.clone + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var deep = _.cloneDeep(objects); + * console.log(deep[0] === objects[0]); + * // => false + */ + function cloneDeep(value) { + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG); + } + + /** + * This method is like `_.cloneWith` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the deep cloned value. + * @see _.cloneWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(true); + * } + * } + * + * var el = _.cloneDeepWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 20 + */ + function cloneDeepWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG, customizer); + } + + /** + * Checks if `object` conforms to `source` by invoking the predicate + * properties of `source` with the corresponding property values of `object`. + * + * **Note:** This method is equivalent to `_.conforms` when `source` is + * partially applied. + * + * @static + * @memberOf _ + * @since 4.14.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.conformsTo(object, { 'b': function(n) { return n > 1; } }); + * // => true + * + * _.conformsTo(object, { 'b': function(n) { return n > 2; } }); + * // => false + */ + function conformsTo(object, source) { + return source == null || baseConformsTo(object, source, keys(source)); + } + + /** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ + function eq(value, other) { + return value === other || (value !== value && other !== other); + } + + /** + * Checks if `value` is greater than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + * @see _.lt + * @example + * + * _.gt(3, 1); + * // => true + * + * _.gt(3, 3); + * // => false + * + * _.gt(1, 3); + * // => false + */ + var gt = createRelationalOperation(baseGt); + + /** + * Checks if `value` is greater than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. 
+ * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than or equal to + * `other`, else `false`. + * @see _.lte + * @example + * + * _.gte(3, 1); + * // => true + * + * _.gte(3, 3); + * // => true + * + * _.gte(1, 3); + * // => false + */ + var gte = createRelationalOperation(function(value, other) { + return value >= other; + }); + + /** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ + var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); + }; + + /** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ + var isArray = Array.isArray; + + /** + * Checks if `value` is classified as an `ArrayBuffer` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + * @example + * + * _.isArrayBuffer(new ArrayBuffer(2)); + * // => true + * + * _.isArrayBuffer(new Array(2)); + * // => false + */ + var isArrayBuffer = nodeIsArrayBuffer ? baseUnary(nodeIsArrayBuffer) : baseIsArrayBuffer; + + /** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. + * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ + function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); + } + + /** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ + function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); + } + + /** + * Checks if `value` is classified as a boolean primitive or object. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a boolean, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ + function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && baseGetTag(value) == boolTag); + } + + /** + * Checks if `value` is a buffer. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. + * @example + * + * _.isBuffer(new Buffer(2)); + * // => true + * + * _.isBuffer(new Uint8Array(2)); + * // => false + */ + var isBuffer = nativeIsBuffer || stubFalse; + + /** + * Checks if `value` is classified as a `Date` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + * @example + * + * _.isDate(new Date); + * // => true + * + * _.isDate('Mon April 23 2012'); + * // => false + */ + var isDate = nodeIsDate ? baseUnary(nodeIsDate) : baseIsDate; + + /** + * Checks if `value` is likely a DOM element. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a DOM element, else `false`. + * @example + * + * _.isElement(document.body); + * // => true + * + * _.isElement('<body>'); + * // => false + */ + function isElement(value) { + return isObjectLike(value) && value.nodeType === 1 && !isPlainObject(value); + } + + /** + * Checks if `value` is an empty object, collection, map, or set. + * + * Objects are considered empty if they have no own enumerable string keyed + * properties. + * + * Array-like values such as `arguments` objects, arrays, buffers, strings, or + * jQuery-like collections are considered empty if they have a `length` of `0`. + * Similarly, maps and sets are considered empty if they have a `size` of `0`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is empty, else `false`. + * @example + * + * _.isEmpty(null); + * // => true + * + * _.isEmpty(true); + * // => true + * + * _.isEmpty(1); + * // => true + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({ 'a': 1 }); + * // => false + */ + function isEmpty(value) { + if (value == null) { + return true; + } + if (isArrayLike(value) && + (isArray(value) || typeof value == 'string' || typeof value.splice == 'function' || + isBuffer(value) || isTypedArray(value) || isArguments(value))) { + return !value.length; + } + var tag = getTag(value); + if (tag == mapTag || tag == setTag) { + return !value.size; + } + if (isPrototype(value)) { + return !baseKeys(value).length; + } + for (var key in value) { + if (hasOwnProperty.call(value, key)) { + return false; + } + } + return true; + } + + /** + * Performs a deep comparison between two values to determine if they are + * equivalent. + * + * **Note:** This method supports comparing arrays, array buffers, booleans, + * date objects, error objects, maps, numbers, `Object` objects, regexes, + * sets, strings, symbols, and typed arrays. `Object` objects are compared + * by their own, not inherited, enumerable properties. 
Functions and DOM + * nodes are compared by strict equality, i.e. `===`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.isEqual(object, other); + * // => true + * + * object === other; + * // => false + */ + function isEqual(value, other) { + return baseIsEqual(value, other); + } + + /** + * This method is like `_.isEqual` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with up to + * six arguments: (objValue, othValue [, index|key, object, other, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, othValue) { + * if (isGreeting(objValue) && isGreeting(othValue)) { + * return true; + * } + * } + * + * var array = ['hello', 'goodbye']; + * var other = ['hi', 'goodbye']; + * + * _.isEqualWith(array, other, customizer); + * // => true + */ + function isEqualWith(value, other, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + var result = customizer ? customizer(value, other) : undefined; + return result === undefined ? baseIsEqual(value, other, undefined, customizer) : !!result; + } + + /** + * Checks if `value` is an `Error`, `EvalError`, `RangeError`, `ReferenceError`, + * `SyntaxError`, `TypeError`, or `URIError` object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an error object, else `false`. + * @example + * + * _.isError(new Error); + * // => true + * + * _.isError(Error); + * // => false + */ + function isError(value) { + if (!isObjectLike(value)) { + return false; + } + var tag = baseGetTag(value); + return tag == errorTag || tag == domExcTag || + (typeof value.message == 'string' && typeof value.name == 'string' && !isPlainObject(value)); + } + + /** + * Checks if `value` is a finite primitive number. + * + * **Note:** This method is based on + * [`Number.isFinite`](https://mdn.io/Number/isFinite). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a finite number, else `false`. + * @example + * + * _.isFinite(3); + * // => true + * + * _.isFinite(Number.MIN_VALUE); + * // => true + * + * _.isFinite(Infinity); + * // => false + * + * _.isFinite('3'); + * // => false + */ + function isFinite(value) { + return typeof value == 'number' && nativeIsFinite(value); + } + + /** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. 
+ * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ + function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. + var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; + } + + /** + * Checks if `value` is an integer. + * + * **Note:** This method is based on + * [`Number.isInteger`](https://mdn.io/Number/isInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an integer, else `false`. + * @example + * + * _.isInteger(3); + * // => true + * + * _.isInteger(Number.MIN_VALUE); + * // => false + * + * _.isInteger(Infinity); + * // => false + * + * _.isInteger('3'); + * // => false + */ + function isInteger(value) { + return typeof value == 'number' && value == toInteger(value); + } + + /** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ + function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ + function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); + } + + /** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ + function isObjectLike(value) { + return value != null && typeof value == 'object'; + } + + /** + * Checks if `value` is classified as a `Map` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + * @example + * + * _.isMap(new Map); + * // => true + * + * _.isMap(new WeakMap); + * // => false + */ + var isMap = nodeIsMap ? 
baseUnary(nodeIsMap) : baseIsMap; + + /** + * Performs a partial deep comparison between `object` and `source` to + * determine if `object` contains equivalent property values. + * + * **Note:** This method is equivalent to `_.matches` when `source` is + * partially applied. + * + * Partial comparisons will match empty array and empty object `source` + * values against any array or object value, respectively. See `_.isEqual` + * for a list of supported value comparisons. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.isMatch(object, { 'b': 2 }); + * // => true + * + * _.isMatch(object, { 'b': 1 }); + * // => false + */ + function isMatch(object, source) { + return object === source || baseIsMatch(object, source, getMatchData(source)); + } + + /** + * This method is like `_.isMatch` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with five + * arguments: (objValue, srcValue, index|key, object, source). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, srcValue) { + * if (isGreeting(objValue) && isGreeting(srcValue)) { + * return true; + * } + * } + * + * var object = { 'greeting': 'hello' }; + * var source = { 'greeting': 'hi' }; + * + * _.isMatchWith(object, source, customizer); + * // => true + */ + function isMatchWith(object, source, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseIsMatch(object, source, getMatchData(source), customizer); + } + + /** + * Checks if `value` is `NaN`. + * + * **Note:** This method is based on + * [`Number.isNaN`](https://mdn.io/Number/isNaN) and is not the same as + * global [`isNaN`](https://mdn.io/isNaN) which returns `true` for + * `undefined` and other non-number values. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + * @example + * + * _.isNaN(NaN); + * // => true + * + * _.isNaN(new Number(NaN)); + * // => true + * + * isNaN(undefined); + * // => true + * + * _.isNaN(undefined); + * // => false + */ + function isNaN(value) { + // An `NaN` primitive is the only value that is not equal to itself. + // Perform the `toStringTag` check first to avoid errors with some + // ActiveX objects in IE. + return isNumber(value) && value != +value; + } + + /** + * Checks if `value` is a pristine native function. + * + * **Note:** This method can't reliably detect native functions in the presence + * of the core-js package because core-js circumvents this kind of detection. + * Despite multiple requests, the core-js maintainer has made it clear: any + * attempt to fix the detection will be obstructed. 
As a result, we're left + * with little choice but to throw an error. Unfortunately, this also affects + * packages, like [babel-polyfill](https://www.npmjs.com/package/babel-polyfill), + * which rely on core-js. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + * @example + * + * _.isNative(Array.prototype.push); + * // => true + * + * _.isNative(_); + * // => false + */ + function isNative(value) { + if (isMaskable(value)) { + throw new Error(CORE_ERROR_TEXT); + } + return baseIsNative(value); + } + + /** + * Checks if `value` is `null`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `null`, else `false`. + * @example + * + * _.isNull(null); + * // => true + * + * _.isNull(void 0); + * // => false + */ + function isNull(value) { + return value === null; + } + + /** + * Checks if `value` is `null` or `undefined`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is nullish, else `false`. + * @example + * + * _.isNil(null); + * // => true + * + * _.isNil(void 0); + * // => true + * + * _.isNil(NaN); + * // => false + */ + function isNil(value) { + return value == null; + } + + /** + * Checks if `value` is classified as a `Number` primitive or object. + * + * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are + * classified as numbers, use the `_.isFinite` method. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a number, else `false`. + * @example + * + * _.isNumber(3); + * // => true + * + * _.isNumber(Number.MIN_VALUE); + * // => true + * + * _.isNumber(Infinity); + * // => true + * + * _.isNumber('3'); + * // => false + */ + function isNumber(value) { + return typeof value == 'number' || + (isObjectLike(value) && baseGetTag(value) == numberTag); + } + + /** + * Checks if `value` is a plain object, that is, an object created by the + * `Object` constructor or one with a `[[Prototype]]` of `null`. + * + * @static + * @memberOf _ + * @since 0.8.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * _.isPlainObject(new Foo); + * // => false + * + * _.isPlainObject([1, 2, 3]); + * // => false + * + * _.isPlainObject({ 'x': 0, 'y': 0 }); + * // => true + * + * _.isPlainObject(Object.create(null)); + * // => true + */ + function isPlainObject(value) { + if (!isObjectLike(value) || baseGetTag(value) != objectTag) { + return false; + } + var proto = getPrototype(value); + if (proto === null) { + return true; + } + var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; + return typeof Ctor == 'function' && Ctor instanceof Ctor && + funcToString.call(Ctor) == objectCtorString; + } + + /** + * Checks if `value` is classified as a `RegExp` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. 
+ * @example + * + * _.isRegExp(/abc/); + * // => true + * + * _.isRegExp('/abc/'); + * // => false + */ + var isRegExp = nodeIsRegExp ? baseUnary(nodeIsRegExp) : baseIsRegExp; + + /** + * Checks if `value` is a safe integer. An integer is safe if it's an IEEE-754 + * double precision number which isn't the result of a rounded unsafe integer. + * + * **Note:** This method is based on + * [`Number.isSafeInteger`](https://mdn.io/Number/isSafeInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a safe integer, else `false`. + * @example + * + * _.isSafeInteger(3); + * // => true + * + * _.isSafeInteger(Number.MIN_VALUE); + * // => false + * + * _.isSafeInteger(Infinity); + * // => false + * + * _.isSafeInteger('3'); + * // => false + */ + function isSafeInteger(value) { + return isInteger(value) && value >= -MAX_SAFE_INTEGER && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is classified as a `Set` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + * @example + * + * _.isSet(new Set); + * // => true + * + * _.isSet(new WeakSet); + * // => false + */ + var isSet = nodeIsSet ? baseUnary(nodeIsSet) : baseIsSet; + + /** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ + function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); + } + + /** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ + function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && baseGetTag(value) == symbolTag); + } + + /** + * Checks if `value` is classified as a typed array. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + * @example + * + * _.isTypedArray(new Uint8Array); + * // => true + * + * _.isTypedArray([]); + * // => false + */ + var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; + + /** + * Checks if `value` is `undefined`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `undefined`, else `false`. + * @example + * + * _.isUndefined(void 0); + * // => true + * + * _.isUndefined(null); + * // => false + */ + function isUndefined(value) { + return value === undefined; + } + + /** + * Checks if `value` is classified as a `WeakMap` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak map, else `false`. 
+ * @example + * + * _.isWeakMap(new WeakMap); + * // => true + * + * _.isWeakMap(new Map); + * // => false + */ + function isWeakMap(value) { + return isObjectLike(value) && getTag(value) == weakMapTag; + } + + /** + * Checks if `value` is classified as a `WeakSet` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak set, else `false`. + * @example + * + * _.isWeakSet(new WeakSet); + * // => true + * + * _.isWeakSet(new Set); + * // => false + */ + function isWeakSet(value) { + return isObjectLike(value) && baseGetTag(value) == weakSetTag; + } + + /** + * Checks if `value` is less than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + * @see _.gt + * @example + * + * _.lt(1, 3); + * // => true + * + * _.lt(3, 3); + * // => false + * + * _.lt(3, 1); + * // => false + */ + var lt = createRelationalOperation(baseLt); + + /** + * Checks if `value` is less than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than or equal to + * `other`, else `false`. + * @see _.gte + * @example + * + * _.lte(1, 3); + * // => true + * + * _.lte(3, 3); + * // => true + * + * _.lte(3, 1); + * // => false + */ + var lte = createRelationalOperation(function(value, other) { + return value <= other; + }); + + /** + * Converts `value` to an array. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to convert. + * @returns {Array} Returns the converted array. + * @example + * + * _.toArray({ 'a': 1, 'b': 2 }); + * // => [1, 2] + * + * _.toArray('abc'); + * // => ['a', 'b', 'c'] + * + * _.toArray(1); + * // => [] + * + * _.toArray(null); + * // => [] + */ + function toArray(value) { + if (!value) { + return []; + } + if (isArrayLike(value)) { + return isString(value) ? stringToArray(value) : copyArray(value); + } + if (symIterator && value[symIterator]) { + return iteratorToArray(value[symIterator]()); + } + var tag = getTag(value), + func = tag == mapTag ? mapToArray : (tag == setTag ? setToArray : values); + + return func(value); + } + + /** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ + function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? -1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; + } + + /** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. 
+ * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ + function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; + + return result === result ? (remainder ? result - remainder : result) : 0; + } + + /** + * Converts `value` to an integer suitable for use as the length of an + * array-like object. + * + * **Note:** This method is based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toLength(3.2); + * // => 3 + * + * _.toLength(Number.MIN_VALUE); + * // => 0 + * + * _.toLength(Infinity); + * // => 4294967295 + * + * _.toLength('3.2'); + * // => 3 + */ + function toLength(value) { + return value ? baseClamp(toInteger(value), 0, MAX_ARRAY_LENGTH) : 0; + } + + /** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ + function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = value.replace(reTrim, ''); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); + } + + /** + * Converts `value` to a plain object flattening inherited enumerable string + * keyed properties of `value` to own properties of the plain object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {Object} Returns the converted plain object. + * @example + * + * function Foo() { + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.assign({ 'a': 1 }, new Foo); + * // => { 'a': 1, 'b': 2 } + * + * _.assign({ 'a': 1 }, _.toPlainObject(new Foo)); + * // => { 'a': 1, 'b': 2, 'c': 3 } + */ + function toPlainObject(value) { + return copyObject(value, keysIn(value)); + } + + /** + * Converts `value` to a safe integer. A safe integer can be compared and + * represented correctly. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toSafeInteger(3.2); + * // => 3 + * + * _.toSafeInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toSafeInteger(Infinity); + * // => 9007199254740991 + * + * _.toSafeInteger('3.2'); + * // => 3 + */ + function toSafeInteger(value) { + return value + ? baseClamp(toInteger(value), -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER) + : (value === 0 ? value : 0); + } + + /** + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. 
+ * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' + */ + function toString(value) { + return value == null ? '' : baseToString(value); + } + + /*------------------------------------------------------------------------*/ + + /** + * Assigns own enumerable string keyed properties of source objects to the + * destination object. Source objects are applied from left to right. + * Subsequent sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object` and is loosely based on + * [`Object.assign`](https://mdn.io/Object/assign). + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assignIn + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assign({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'c': 3 } + */ + var assign = createAssigner(function(object, source) { + if (isPrototype(source) || isArrayLike(source)) { + copyObject(source, keys(source), object); + return; + } + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + assignValue(object, key, source[key]); + } + } + }); + + /** + * This method is like `_.assign` except that it iterates over own and + * inherited source properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extend + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assign + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assignIn({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } + */ + var assignIn = createAssigner(function(object, source) { + copyObject(source, keysIn(source), object); + }); + + /** + * This method is like `_.assignIn` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extendWith + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? 
srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignInWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignInWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keysIn(source), object, customizer); + }); + + /** + * This method is like `_.assign` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignInWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keys(source), object, customizer); + }); + + /** + * Creates an array of values corresponding to `paths` of `object`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Array} Returns the picked values. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _.at(object, ['a[0].b.c', 'a[1]']); + * // => [3, 4] + */ + var at = flatRest(baseAt); + + /** + * Creates an object that inherits from the `prototype` object. If a + * `properties` object is given, its own enumerable string keyed properties + * are assigned to the created object. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Object + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { + * 'constructor': Circle + * }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ + function create(prototype, properties) { + var result = baseCreate(prototype); + return properties == null ? result : baseAssign(result, properties); + } + + /** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. + * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. 
+ * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var defaults = baseRest(function(object, sources) { + object = Object(object); + + var index = -1; + var length = sources.length; + var guard = length > 2 ? sources[2] : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; + } + + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; + + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; + + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } + } + + return object; + }); + + /** + * This method is like `_.defaults` except that it recursively assigns + * default properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaults + * @example + * + * _.defaultsDeep({ 'a': { 'b': 2 } }, { 'a': { 'b': 1, 'c': 3 } }); + * // => { 'a': { 'b': 2, 'c': 3 } } + */ + var defaultsDeep = baseRest(function(args) { + args.push(undefined, customDefaultsMerge); + return apply(mergeWith, undefined, args); + }); + + /** + * This method is like `_.find` except that it returns the key of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findKey(users, function(o) { return o.age < 40; }); + * // => 'barney' (iteration order is not guaranteed) + * + * // The `_.matches` iteratee shorthand. + * _.findKey(users, { 'age': 1, 'active': true }); + * // => 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findKey(users, 'active'); + * // => 'barney' + */ + function findKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwn); + } + + /** + * This method is like `_.findKey` except that it iterates over elements of + * a collection in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findLastKey(users, function(o) { return o.age < 40; }); + * // => returns 'pebbles' assuming `_.findKey` returns 'barney' + * + * // The `_.matches` iteratee shorthand. 
+ * _.findLastKey(users, { 'age': 36, 'active': true }); + * // => 'barney' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findLastKey(users, 'active'); + * // => 'pebbles' + */ + function findLastKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwnRight); + } + + /** + * Iterates over own and inherited enumerable string keyed properties of an + * object and invokes `iteratee` for each property. The iteratee is invoked + * with three arguments: (value, key, object). Iteratee functions may exit + * iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forInRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forIn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a', 'b', then 'c' (iteration order is not guaranteed). + */ + function forIn(object, iteratee) { + return object == null + ? object + : baseFor(object, getIteratee(iteratee, 3), keysIn); + } + + /** + * This method is like `_.forIn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forIn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forInRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'c', 'b', then 'a' assuming `_.forIn` logs 'a', 'b', then 'c'. + */ + function forInRight(object, iteratee) { + return object == null + ? object + : baseForRight(object, getIteratee(iteratee, 3), keysIn); + } + + /** + * Iterates over own enumerable string keyed properties of an object and + * invokes `iteratee` for each property. The iteratee is invoked with three + * arguments: (value, key, object). Iteratee functions may exit iteration + * early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwnRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forOwn(object, iteratee) { + return object && baseForOwn(object, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.forOwn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. 
+ * @see _.forOwn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwnRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'b' then 'a' assuming `_.forOwn` logs 'a' then 'b'. + */ + function forOwnRight(object, iteratee) { + return object && baseForOwnRight(object, getIteratee(iteratee, 3)); + } + + /** + * Creates an array of function property names from own enumerable properties + * of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functionsIn + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functions(new Foo); + * // => ['a', 'b'] + */ + function functions(object) { + return object == null ? [] : baseFunctions(object, keys(object)); + } + + /** + * Creates an array of function property names from own and inherited + * enumerable properties of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functions + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functionsIn(new Foo); + * // => ['a', 'b', 'c'] + */ + function functionsIn(object) { + return object == null ? [] : baseFunctions(object, keysIn(object)); + } + + /** + * Gets the value at `path` of `object`. If the resolved value is + * `undefined`, the `defaultValue` is returned in its place. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.get(object, 'a[0].b.c'); + * // => 3 + * + * _.get(object, ['a', '0', 'b', 'c']); + * // => 3 + * + * _.get(object, 'a.b.c', 'default'); + * // => 'default' + */ + function get(object, path, defaultValue) { + var result = object == null ? undefined : baseGet(object, path); + return result === undefined ? defaultValue : result; + } + + /** + * Checks if `path` is a direct property of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = { 'a': { 'b': 2 } }; + * var other = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.has(object, 'a'); + * // => true + * + * _.has(object, 'a.b'); + * // => true + * + * _.has(object, ['a', 'b']); + * // => true + * + * _.has(other, 'a'); + * // => false + */ + function has(object, path) { + return object != null && hasPath(object, path, baseHas); + } + + /** + * Checks if `path` is a direct or inherited property of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. 
+ * @example + * + * var object = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.hasIn(object, 'a'); + * // => true + * + * _.hasIn(object, 'a.b'); + * // => true + * + * _.hasIn(object, ['a', 'b']); + * // => true + * + * _.hasIn(object, 'b'); + * // => false + */ + function hasIn(object, path) { + return object != null && hasPath(object, path, baseHasIn); + } + + /** + * Creates an object composed of the inverted keys and values of `object`. + * If `object` contains duplicate values, subsequent values overwrite + * property assignments of previous values. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Object + * @param {Object} object The object to invert. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invert(object); + * // => { '1': 'c', '2': 'b' } + */ + var invert = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + result[value] = key; + }, constant(identity)); + + /** + * This method is like `_.invert` except that the inverted object is generated + * from the results of running each element of `object` thru `iteratee`. The + * corresponding inverted value of each inverted key is an array of keys + * responsible for generating the inverted value. The iteratee is invoked + * with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Object + * @param {Object} object The object to invert. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invertBy(object); + * // => { '1': ['a', 'c'], '2': ['b'] } + * + * _.invertBy(object, function(value) { + * return 'group' + value; + * }); + * // => { 'group1': ['a', 'c'], 'group2': ['b'] } + */ + var invertBy = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + if (hasOwnProperty.call(result, value)) { + result[value].push(key); + } else { + result[value] = [key]; + } + }, getIteratee); + + /** + * Invokes the method at `path` of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {...*} [args] The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + * @example + * + * var object = { 'a': [{ 'b': { 'c': [1, 2, 3, 4] } }] }; + * + * _.invoke(object, 'a[0].b.c.slice', 1, 3); + * // => [2, 3] + */ + var invoke = baseRest(baseInvoke); + + /** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ + function keys(object) { + return isArrayLike(object) ? 
arrayLikeKeys(object) : baseKeys(object); + } + + /** + * Creates an array of the own and inherited enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ + function keysIn(object) { + return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object); + } + + /** + * The opposite of `_.mapValues`; this method creates an object with the + * same values as `object` and keys generated by running each own enumerable + * string keyed property of `object` thru `iteratee`. The iteratee is invoked + * with three arguments: (value, key, object). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapValues + * @example + * + * _.mapKeys({ 'a': 1, 'b': 2 }, function(value, key) { + * return key + value; + * }); + * // => { 'a1': 1, 'b2': 2 } + */ + function mapKeys(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); + + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, iteratee(value, key, object), value); + }); + return result; + } + + /** + * Creates an object with the same keys as `object` and values generated + * by running each own enumerable string keyed property of `object` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, key, object). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapKeys + * @example + * + * var users = { + * 'fred': { 'user': 'fred', 'age': 40 }, + * 'pebbles': { 'user': 'pebbles', 'age': 1 } + * }; + * + * _.mapValues(users, function(o) { return o.age; }); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + * + * // The `_.property` iteratee shorthand. + * _.mapValues(users, 'age'); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + */ + function mapValues(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); + + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, key, iteratee(value, key, object)); + }); + return result; + } + + /** + * This method is like `_.assign` except that it recursively merges own and + * inherited enumerable string keyed properties of source objects into the + * destination object. Source properties that resolve to `undefined` are + * skipped if a destination value exists. Array and plain object properties + * are merged recursively. Other objects and value types are overridden by + * assignment. Source objects are applied from left to right. Subsequent + * sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Object + * @param {Object} object The destination object. 
+ * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @example + * + * var object = { + * 'a': [{ 'b': 2 }, { 'd': 4 }] + * }; + * + * var other = { + * 'a': [{ 'c': 3 }, { 'e': 5 }] + * }; + * + * _.merge(object, other); + * // => { 'a': [{ 'b': 2, 'c': 3 }, { 'd': 4, 'e': 5 }] } + */ + var merge = createAssigner(function(object, source, srcIndex) { + baseMerge(object, source, srcIndex); + }); + + /** + * This method is like `_.merge` except that it accepts `customizer` which + * is invoked to produce the merged values of the destination and source + * properties. If `customizer` returns `undefined`, merging is handled by the + * method instead. The `customizer` is invoked with six arguments: + * (objValue, srcValue, key, object, source, stack). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} customizer The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * function customizer(objValue, srcValue) { + * if (_.isArray(objValue)) { + * return objValue.concat(srcValue); + * } + * } + * + * var object = { 'a': [1], 'b': [2] }; + * var other = { 'a': [3], 'b': [4] }; + * + * _.mergeWith(object, other, customizer); + * // => { 'a': [1, 3], 'b': [2, 4] } + */ + var mergeWith = createAssigner(function(object, source, srcIndex, customizer) { + baseMerge(object, source, srcIndex, customizer); + }); + + /** + * The opposite of `_.pick`; this method creates an object composed of the + * own and inherited enumerable property paths of `object` that are not omitted. + * + * **Note:** This method is considerably slower than `_.pick`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to omit. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omit(object, ['a', 'c']); + * // => { 'b': '2' } + */ + var omit = flatRest(function(object, paths) { + var result = {}; + if (object == null) { + return result; + } + var isDeep = false; + paths = arrayMap(paths, function(path) { + path = castPath(path, object); + isDeep || (isDeep = path.length > 1); + return path; + }); + copyObject(object, getAllKeysIn(object), result); + if (isDeep) { + result = baseClone(result, CLONE_DEEP_FLAG | CLONE_FLAT_FLAG | CLONE_SYMBOLS_FLAG, customOmitClone); + } + var length = paths.length; + while (length--) { + baseUnset(result, paths[length]); + } + return result; + }); + + /** + * The opposite of `_.pickBy`; this method creates an object composed of + * the own and inherited enumerable string keyed properties of `object` that + * `predicate` doesn't return truthy for. The predicate is invoked with two + * arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. 
+ * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omitBy(object, _.isNumber); + * // => { 'b': '2' } + */ + function omitBy(object, predicate) { + return pickBy(object, negate(getIteratee(predicate))); + } + + /** + * Creates an object composed of the picked `object` properties. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pick(object, ['a', 'c']); + * // => { 'a': 1, 'c': 3 } + */ + var pick = flatRest(function(object, paths) { + return object == null ? {} : basePick(object, paths); + }); + + /** + * Creates an object composed of the `object` properties `predicate` returns + * truthy for. The predicate is invoked with two arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pickBy(object, _.isNumber); + * // => { 'a': 1, 'c': 3 } + */ + function pickBy(object, predicate) { + if (object == null) { + return {}; + } + var props = arrayMap(getAllKeysIn(object), function(prop) { + return [prop]; + }); + predicate = getIteratee(predicate); + return basePickBy(object, props, function(value, path) { + return predicate(value, path[0]); + }); + } + + /** + * This method is like `_.get` except that if the resolved value is a + * function it's invoked with the `this` binding of its parent object and + * its result is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to resolve. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c1': 3, 'c2': _.constant(4) } }] }; + * + * _.result(object, 'a[0].b.c1'); + * // => 3 + * + * _.result(object, 'a[0].b.c2'); + * // => 4 + * + * _.result(object, 'a[0].b.c3', 'default'); + * // => 'default' + * + * _.result(object, 'a[0].b.c3', _.constant('default')); + * // => 'default' + */ + function result(object, path, defaultValue) { + path = castPath(path, object); + + var index = -1, + length = path.length; + + // Ensure the loop is entered when path is empty. + if (!length) { + length = 1; + object = undefined; + } + while (++index < length) { + var value = object == null ? undefined : object[toKey(path[index])]; + if (value === undefined) { + index = length; + value = defaultValue; + } + object = isFunction(value) ? value.call(object) : value; + } + return object; + } + + /** + * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, + * it's created. Arrays are created for missing index properties while objects + * are created for all other missing properties. Use `_.setWith` to customize + * `path` creation. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @returns {Object} Returns `object`. 
+ * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.set(object, 'a[0].b.c', 4); + * console.log(object.a[0].b.c); + * // => 4 + * + * _.set(object, ['x', '0', 'y', 'z'], 5); + * console.log(object.x[0].y.z); + * // => 5 + */ + function set(object, path, value) { + return object == null ? object : baseSet(object, path, value); + } + + /** + * This method is like `_.set` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * var object = {}; + * + * _.setWith(object, '[0][1]', 'a', Object); + * // => { '0': { '1': 'a' } } + */ + function setWith(object, path, value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseSet(object, path, value, customizer); + } + + /** + * Creates an array of own enumerable string keyed-value pairs for `object` + * which can be consumed by `_.fromPairs`. If `object` is a map or set, its + * entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entries + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairs(new Foo); + * // => [['a', 1], ['b', 2]] (iteration order is not guaranteed) + */ + var toPairs = createToPairs(keys); + + /** + * Creates an array of own and inherited enumerable string keyed-value pairs + * for `object` which can be consumed by `_.fromPairs`. If `object` is a map + * or set, its entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entriesIn + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairsIn(new Foo); + * // => [['a', 1], ['b', 2], ['c', 3]] (iteration order is not guaranteed) + */ + var toPairsIn = createToPairs(keysIn); + + /** + * An alternative to `_.reduce`; this method transforms `object` to a new + * `accumulator` object which is the result of running each of its own + * enumerable string keyed properties thru `iteratee`, with each invocation + * potentially mutating the `accumulator` object. If `accumulator` is not + * provided, a new object with the same `[[Prototype]]` will be used. The + * iteratee is invoked with four arguments: (accumulator, value, key, object). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The custom accumulator value. + * @returns {*} Returns the accumulated value. 
+ * @example + * + * _.transform([2, 3, 4], function(result, n) { + * result.push(n *= n); + * return n % 2 == 0; + * }, []); + * // => [4, 9] + * + * _.transform({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } + */ + function transform(object, iteratee, accumulator) { + var isArr = isArray(object), + isArrLike = isArr || isBuffer(object) || isTypedArray(object); + + iteratee = getIteratee(iteratee, 4); + if (accumulator == null) { + var Ctor = object && object.constructor; + if (isArrLike) { + accumulator = isArr ? new Ctor : []; + } + else if (isObject(object)) { + accumulator = isFunction(Ctor) ? baseCreate(getPrototype(object)) : {}; + } + else { + accumulator = {}; + } + } + (isArrLike ? arrayEach : baseForOwn)(object, function(value, index, object) { + return iteratee(accumulator, value, index, object); + }); + return accumulator; + } + + /** + * Removes the property at `path` of `object`. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 7 } }] }; + * _.unset(object, 'a[0].b.c'); + * // => true + * + * console.log(object); + * // => { 'a': [{ 'b': {} }] }; + * + * _.unset(object, ['a', '0', 'b', 'c']); + * // => true + * + * console.log(object); + * // => { 'a': [{ 'b': {} }] }; + */ + function unset(object, path) { + return object == null ? true : baseUnset(object, path); + } + + /** + * This method is like `_.set` except that accepts `updater` to produce the + * value to set. Use `_.updateWith` to customize `path` creation. The `updater` + * is invoked with one argument: (value). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {Function} updater The function to produce the updated value. + * @returns {Object} Returns `object`. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.update(object, 'a[0].b.c', function(n) { return n * n; }); + * console.log(object.a[0].b.c); + * // => 9 + * + * _.update(object, 'x[0].y.z', function(n) { return n ? n + 1 : 0; }); + * console.log(object.x[0].y.z); + * // => 0 + */ + function update(object, path, updater) { + return object == null ? object : baseUpdate(object, path, castFunction(updater)); + } + + /** + * This method is like `_.update` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. 
+ * @example + * + * var object = {}; + * + * _.updateWith(object, '[0][1]', _.constant('a'), Object); + * // => { '0': { '1': 'a' } } + */ + function updateWith(object, path, updater, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseUpdate(object, path, castFunction(updater), customizer); + } + + /** + * Creates an array of the own enumerable string keyed property values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.values(new Foo); + * // => [1, 2] (iteration order is not guaranteed) + * + * _.values('hi'); + * // => ['h', 'i'] + */ + function values(object) { + return object == null ? [] : baseValues(object, keys(object)); + } + + /** + * Creates an array of the own and inherited enumerable string keyed property + * values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.valuesIn(new Foo); + * // => [1, 2, 3] (iteration order is not guaranteed) + */ + function valuesIn(object) { + return object == null ? [] : baseValues(object, keysIn(object)); + } + + /*------------------------------------------------------------------------*/ + + /** + * Clamps `number` within the inclusive `lower` and `upper` bounds. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Number + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + * @example + * + * _.clamp(-10, -5, 5); + * // => -5 + * + * _.clamp(10, -5, 5); + * // => 5 + */ + function clamp(number, lower, upper) { + if (upper === undefined) { + upper = lower; + lower = undefined; + } + if (upper !== undefined) { + upper = toNumber(upper); + upper = upper === upper ? upper : 0; + } + if (lower !== undefined) { + lower = toNumber(lower); + lower = lower === lower ? lower : 0; + } + return baseClamp(toNumber(number), lower, upper); + } + + /** + * Checks if `n` is between `start` and up to, but not including, `end`. If + * `end` is not specified, it's set to `start` with `start` then set to `0`. + * If `start` is greater than `end` the params are swapped to support + * negative ranges. + * + * @static + * @memberOf _ + * @since 3.3.0 + * @category Number + * @param {number} number The number to check. + * @param {number} [start=0] The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. 
+ * @see _.range, _.rangeRight + * @example + * + * _.inRange(3, 2, 4); + * // => true + * + * _.inRange(4, 8); + * // => true + * + * _.inRange(4, 2); + * // => false + * + * _.inRange(2, 2); + * // => false + * + * _.inRange(1.2, 2); + * // => true + * + * _.inRange(5.2, 4); + * // => false + * + * _.inRange(-3, -2, -6); + * // => true + */ + function inRange(number, start, end) { + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + number = toNumber(number); + return baseInRange(number, start, end); + } + + /** + * Produces a random number between the inclusive `lower` and `upper` bounds. + * If only one argument is provided a number between `0` and the given number + * is returned. If `floating` is `true`, or either `lower` or `upper` are + * floats, a floating-point number is returned instead of an integer. + * + * **Note:** JavaScript follows the IEEE-754 standard for resolving + * floating-point values which can produce unexpected results. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Number + * @param {number} [lower=0] The lower bound. + * @param {number} [upper=1] The upper bound. + * @param {boolean} [floating] Specify returning a floating-point number. + * @returns {number} Returns the random number. + * @example + * + * _.random(0, 5); + * // => an integer between 0 and 5 + * + * _.random(5); + * // => also an integer between 0 and 5 + * + * _.random(5, true); + * // => a floating-point number between 0 and 5 + * + * _.random(1.2, 5.2); + * // => a floating-point number between 1.2 and 5.2 + */ + function random(lower, upper, floating) { + if (floating && typeof floating != 'boolean' && isIterateeCall(lower, upper, floating)) { + upper = floating = undefined; + } + if (floating === undefined) { + if (typeof upper == 'boolean') { + floating = upper; + upper = undefined; + } + else if (typeof lower == 'boolean') { + floating = lower; + lower = undefined; + } + } + if (lower === undefined && upper === undefined) { + lower = 0; + upper = 1; + } + else { + lower = toFinite(lower); + if (upper === undefined) { + upper = lower; + lower = 0; + } else { + upper = toFinite(upper); + } + } + if (lower > upper) { + var temp = lower; + lower = upper; + upper = temp; + } + if (floating || lower % 1 || upper % 1) { + var rand = nativeRandom(); + return nativeMin(lower + (rand * (upper - lower + freeParseFloat('1e-' + ((rand + '').length - 1)))), upper); + } + return baseRandom(lower, upper); + } + + /*------------------------------------------------------------------------*/ + + /** + * Converts `string` to [camel case](https://en.wikipedia.org/wiki/CamelCase). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the camel cased string. + * @example + * + * _.camelCase('Foo Bar'); + * // => 'fooBar' + * + * _.camelCase('--foo-bar--'); + * // => 'fooBar' + * + * _.camelCase('__FOO_BAR__'); + * // => 'fooBar' + */ + var camelCase = createCompounder(function(result, word, index) { + word = word.toLowerCase(); + return result + (index ? capitalize(word) : word); + }); + + /** + * Converts the first character of `string` to upper case and the remaining + * to lower case. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to capitalize. + * @returns {string} Returns the capitalized string. 
+ * @example
+ *
+ * _.capitalize('FRED');
+ * // => 'Fred'
+ */
+ function capitalize(string) {
+ return upperFirst(toString(string).toLowerCase());
+ }
+
+ /**
+ * Deburrs `string` by converting
+ * [Latin-1 Supplement](https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)#Character_table)
+ * and [Latin Extended-A](https://en.wikipedia.org/wiki/Latin_Extended-A)
+ * letters to basic Latin letters and removing
+ * [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks).
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category String
+ * @param {string} [string=''] The string to deburr.
+ * @returns {string} Returns the deburred string.
+ * @example
+ *
+ * _.deburr('déjà vu');
+ * // => 'deja vu'
+ */
+ function deburr(string) {
+ string = toString(string);
+ return string && string.replace(reLatin, deburrLetter).replace(reComboMark, '');
+ }
+
+ /**
+ * Checks if `string` ends with the given target string.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category String
+ * @param {string} [string=''] The string to inspect.
+ * @param {string} [target] The string to search for.
+ * @param {number} [position=string.length] The position to search up to.
+ * @returns {boolean} Returns `true` if `string` ends with `target`,
+ * else `false`.
+ * @example
+ *
+ * _.endsWith('abc', 'c');
+ * // => true
+ *
+ * _.endsWith('abc', 'b');
+ * // => false
+ *
+ * _.endsWith('abc', 'b', 2);
+ * // => true
+ */
+ function endsWith(string, target, position) {
+ string = toString(string);
+ target = baseToString(target);
+
+ var length = string.length;
+ position = position === undefined
+ ? length
+ : baseClamp(toInteger(position), 0, length);
+
+ var end = position;
+ position -= target.length;
+ return position >= 0 && string.slice(position, end) == target;
+ }
+
+ /**
+ * Converts the characters "&", "<", ">", '"', and "'" in `string` to their
+ * corresponding HTML entities.
+ *
+ * **Note:** No other characters are escaped. To escape additional
+ * characters use a third-party library like [_he_](https://mths.be/he).
+ *
+ * Though the ">" character is escaped for symmetry, characters like
+ * ">" and "/" don't need escaping in HTML and have no special meaning
+ * unless they're part of a tag or unquoted attribute value. See
+ * [Mathias Bynens's article](https://mathiasbynens.be/notes/ambiguous-ampersands)
+ * (under "semi-related fun fact") for more details.
+ *
+ * When working with HTML you should always
+ * [quote attribute values](http://wonko.com/post/html-escaping) to reduce
+ * XSS vectors.
+ *
+ * @static
+ * @since 0.1.0
+ * @memberOf _
+ * @category String
+ * @param {string} [string=''] The string to escape.
+ * @returns {string} Returns the escaped string.
+ * @example
+ *
+ * _.escape('fred, barney, & pebbles');
+ * // => 'fred, barney, &amp; pebbles'
+ */
+ function escape(string) {
+ string = toString(string);
+ return (string && reHasUnescapedHtml.test(string))
+ ? string.replace(reUnescapedHtml, escapeHtmlChar)
+ : string;
+ }
+
+ /**
+ * Escapes the `RegExp` special characters "^", "$", "\", ".", "*", "+",
+ * "?", "(", ")", "[", "]", "{", "}", and "|" in `string`.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category String
+ * @param {string} [string=''] The string to escape.
+ * @returns {string} Returns the escaped string.
+ * @example + * + * _.escapeRegExp('[lodash](https://lodash.com/)'); + * // => '\[lodash\]\(https://lodash\.com/\)' + */ + function escapeRegExp(string) { + string = toString(string); + return (string && reHasRegExpChar.test(string)) + ? string.replace(reRegExpChar, '\\$&') + : string; + } + + /** + * Converts `string` to + * [kebab case](https://en.wikipedia.org/wiki/Letter_case#Special_case_styles). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the kebab cased string. + * @example + * + * _.kebabCase('Foo Bar'); + * // => 'foo-bar' + * + * _.kebabCase('fooBar'); + * // => 'foo-bar' + * + * _.kebabCase('__FOO_BAR__'); + * // => 'foo-bar' + */ + var kebabCase = createCompounder(function(result, word, index) { + return result + (index ? '-' : '') + word.toLowerCase(); + }); + + /** + * Converts `string`, as space separated words, to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the lower cased string. + * @example + * + * _.lowerCase('--Foo-Bar--'); + * // => 'foo bar' + * + * _.lowerCase('fooBar'); + * // => 'foo bar' + * + * _.lowerCase('__FOO_BAR__'); + * // => 'foo bar' + */ + var lowerCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + word.toLowerCase(); + }); + + /** + * Converts the first character of `string` to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.lowerFirst('Fred'); + * // => 'fred' + * + * _.lowerFirst('FRED'); + * // => 'fRED' + */ + var lowerFirst = createCaseFirst('toLowerCase'); + + /** + * Pads `string` on the left and right sides if it's shorter than `length`. + * Padding characters are truncated if they can't be evenly divided by `length`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.pad('abc', 8); + * // => ' abc ' + * + * _.pad('abc', 8, '_-'); + * // => '_-abc_-_' + * + * _.pad('abc', 3); + * // => 'abc' + */ + function pad(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + if (!length || strLength >= length) { + return string; + } + var mid = (length - strLength) / 2; + return ( + createPadding(nativeFloor(mid), chars) + + string + + createPadding(nativeCeil(mid), chars) + ); + } + + /** + * Pads `string` on the right side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padEnd('abc', 6); + * // => 'abc ' + * + * _.padEnd('abc', 6, '_-'); + * // => 'abc_-_' + * + * _.padEnd('abc', 3); + * // => 'abc' + */ + function padEnd(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? 
stringSize(string) : 0; + return (length && strLength < length) + ? (string + createPadding(length - strLength, chars)) + : string; + } + + /** + * Pads `string` on the left side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padStart('abc', 6); + * // => ' abc' + * + * _.padStart('abc', 6, '_-'); + * // => '_-_abc' + * + * _.padStart('abc', 3); + * // => 'abc' + */ + function padStart(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + return (length && strLength < length) + ? (createPadding(length - strLength, chars) + string) + : string; + } + + /** + * Converts `string` to an integer of the specified radix. If `radix` is + * `undefined` or `0`, a `radix` of `10` is used unless `value` is a + * hexadecimal, in which case a `radix` of `16` is used. + * + * **Note:** This method aligns with the + * [ES5 implementation](https://es5.github.io/#x15.1.2.2) of `parseInt`. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category String + * @param {string} string The string to convert. + * @param {number} [radix=10] The radix to interpret `value` by. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {number} Returns the converted integer. + * @example + * + * _.parseInt('08'); + * // => 8 + * + * _.map(['6', '08', '10'], _.parseInt); + * // => [6, 8, 10] + */ + function parseInt(string, radix, guard) { + if (guard || radix == null) { + radix = 0; + } else if (radix) { + radix = +radix; + } + return nativeParseInt(toString(string).replace(reTrimStart, ''), radix || 0); + } + + /** + * Repeats the given string `n` times. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to repeat. + * @param {number} [n=1] The number of times to repeat the string. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {string} Returns the repeated string. + * @example + * + * _.repeat('*', 3); + * // => '***' + * + * _.repeat('abc', 2); + * // => 'abcabc' + * + * _.repeat('abc', 0); + * // => '' + */ + function repeat(string, n, guard) { + if ((guard ? isIterateeCall(string, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); + } + return baseRepeat(toString(string), n); + } + + /** + * Replaces matches for `pattern` in `string` with `replacement`. + * + * **Note:** This method is based on + * [`String#replace`](https://mdn.io/String/replace). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to modify. + * @param {RegExp|string} pattern The pattern to replace. + * @param {Function|string} replacement The match replacement. + * @returns {string} Returns the modified string. + * @example + * + * _.replace('Hi Fred', 'Fred', 'Barney'); + * // => 'Hi Barney' + */ + function replace() { + var args = arguments, + string = toString(args[0]); + + return args.length < 3 ? string : string.replace(args[1], args[2]); + } + + /** + * Converts `string` to + * [snake case](https://en.wikipedia.org/wiki/Snake_case). 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the snake cased string. + * @example + * + * _.snakeCase('Foo Bar'); + * // => 'foo_bar' + * + * _.snakeCase('fooBar'); + * // => 'foo_bar' + * + * _.snakeCase('--FOO-BAR--'); + * // => 'foo_bar' + */ + var snakeCase = createCompounder(function(result, word, index) { + return result + (index ? '_' : '') + word.toLowerCase(); + }); + + /** + * Splits `string` by `separator`. + * + * **Note:** This method is based on + * [`String#split`](https://mdn.io/String/split). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to split. + * @param {RegExp|string} separator The separator pattern to split by. + * @param {number} [limit] The length to truncate results to. + * @returns {Array} Returns the string segments. + * @example + * + * _.split('a-b-c', '-', 2); + * // => ['a', 'b'] + */ + function split(string, separator, limit) { + if (limit && typeof limit != 'number' && isIterateeCall(string, separator, limit)) { + separator = limit = undefined; + } + limit = limit === undefined ? MAX_ARRAY_LENGTH : limit >>> 0; + if (!limit) { + return []; + } + string = toString(string); + if (string && ( + typeof separator == 'string' || + (separator != null && !isRegExp(separator)) + )) { + separator = baseToString(separator); + if (!separator && hasUnicode(string)) { + return castSlice(stringToArray(string), 0, limit); + } + } + return string.split(separator, limit); + } + + /** + * Converts `string` to + * [start case](https://en.wikipedia.org/wiki/Letter_case#Stylistic_or_specialised_usage). + * + * @static + * @memberOf _ + * @since 3.1.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the start cased string. + * @example + * + * _.startCase('--foo-bar--'); + * // => 'Foo Bar' + * + * _.startCase('fooBar'); + * // => 'Foo Bar' + * + * _.startCase('__FOO_BAR__'); + * // => 'FOO BAR' + */ + var startCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + upperFirst(word); + }); + + /** + * Checks if `string` starts with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=0] The position to search from. + * @returns {boolean} Returns `true` if `string` starts with `target`, + * else `false`. + * @example + * + * _.startsWith('abc', 'a'); + * // => true + * + * _.startsWith('abc', 'b'); + * // => false + * + * _.startsWith('abc', 'b', 1); + * // => true + */ + function startsWith(string, target, position) { + string = toString(string); + position = position == null + ? 0 + : baseClamp(toInteger(position), 0, string.length); + + target = baseToString(target); + return string.slice(position, position + target.length) == target; + } + + /** + * Creates a compiled template function that can interpolate data properties + * in "interpolate" delimiters, HTML-escape interpolated data properties in + * "escape" delimiters, and execute JavaScript in "evaluate" delimiters. Data + * properties may be accessed as free variables in the template. If a setting + * object is given, it takes precedence over `_.templateSettings` values. 
+ * + * **Note:** In the development build `_.template` utilizes + * [sourceURLs](http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl) + * for easier debugging. + * + * For more information on precompiling templates see + * [lodash's custom builds documentation](https://lodash.com/custom-builds). + * + * For more information on Chrome extension sandboxes see + * [Chrome's extensions documentation](https://developer.chrome.com/extensions/sandboxingEval). + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The template string. + * @param {Object} [options={}] The options object. + * @param {RegExp} [options.escape=_.templateSettings.escape] + * The HTML "escape" delimiter. + * @param {RegExp} [options.evaluate=_.templateSettings.evaluate] + * The "evaluate" delimiter. + * @param {Object} [options.imports=_.templateSettings.imports] + * An object to import into the template as free variables. + * @param {RegExp} [options.interpolate=_.templateSettings.interpolate] + * The "interpolate" delimiter. + * @param {string} [options.sourceURL='lodash.templateSources[n]'] + * The sourceURL of the compiled template. + * @param {string} [options.variable='obj'] + * The data object variable name. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the compiled template function. + * @example + * + * // Use the "interpolate" delimiter to create a compiled template. + * var compiled = _.template('hello <%= user %>!'); + * compiled({ 'user': 'fred' }); + * // => 'hello fred!' + * + * // Use the HTML "escape" delimiter to escape data property values. + * var compiled = _.template('<%- value %>'); + * compiled({ 'value': '