diff --git a/.github/workflows/keyfactor-starter-workflow.yml b/.github/workflows/keyfactor-starter-workflow.yml new file mode 100644 index 00000000..a018ba54 --- /dev/null +++ b/.github/workflows/keyfactor-starter-workflow.yml @@ -0,0 +1,26 @@ +name: Starter Workflow +on: [workflow_dispatch, push, pull_request] + +jobs: + call-create-github-release-workflow: + uses: Keyfactor/actions/.github/workflows/github-release.yml@main + + call-dotnet-build-and-release-workflow: + needs: [call-create-github-release-workflow] + uses: Keyfactor/actions/.github/workflows/dotnet-build-and-release.yml@main + with: + release_version: ${{ needs.call-create-github-release-workflow.outputs.release_version }} + release_url: ${{ needs.call-create-github-release-workflow.outputs.release_url }} + release_dir: RemoteFile/bin/Release + secrets: + token: ${{ secrets.PRIVATE_PACKAGE_ACCESS }} + + call-generate-readme-workflow: + if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' + uses: Keyfactor/actions/.github/workflows/generate-readme.yml@main + + call-update-catalog-workflow: + if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' + uses: Keyfactor/actions/.github/workflows/update-catalog.yml@main + secrets: + token: ${{ secrets.SDK_SYNC_PAT }} diff --git a/Certificate Store Type CURL Scripts/JKS.curl b/Certificate Store Type CURL Scripts/JKS.curl new file mode 100644 index 00000000..e9cd4603 --- /dev/null +++ b/Certificate Store Type CURL Scripts/JKS.curl @@ -0,0 +1,40 @@ +###CURL script to create JKS certificate store type + +###Replacement Variables - Manually replace these before running### +# {URL} - Base URL for your Keyfactor deployment +# {UserName} - User name with access to run Keyfactor APIs +# {UserPassword} - Password for the UserName above + +curl -X POST {URL}/keyfactorapi/certificatestoretypes -H "Content-Type: application/json" -H "x-keyfactor-requested-with: APIClient" -u {UserName}:{UserPassword} -d '{ + "Name": "RFJKS", + 
"ShortName": "RFJKS", + "Capability": "RFJKS", + "ServerRequired": true, + "BlueprintAllowed": false, + "CustomAliasAllowed": "Required", + "PowerShell": false, + "PrivateKeyAllowed": "Optional", + "SupportedOperations": { + "Add": true, + "Create": true, + "Discovery": true, + "Enrollment": false, + "Remove": true + }, + "PasswordOptions": { + "Style": "Default", + "EntrySupported": false, + "StoreRequired": true + }, + "Properties": [ + { + "Name": "LinuxFilePermissionsOnStoreCreation", + "DisplayName": "Linux File Permissions on Store Creation", + "Required": false, + "DependsOn": "", + "Type": "String", + "DefaultValue": "" + } + ], + "EntryParameters": [] +}' diff --git a/Certificate Store Type CURL Scripts/PEM.curl b/Certificate Store Type CURL Scripts/PEM.curl new file mode 100644 index 00000000..887a4c72 --- /dev/null +++ b/Certificate Store Type CURL Scripts/PEM.curl @@ -0,0 +1,64 @@ +###CURL script to create PEM certificate store type + +###Replacement Variables - Manually replace these before running### +# {URL} - Base URL for your Keyfactor deployment +# {UserName} - User name with access to run Keyfactor APIs +# {UserPassword} - Password for the UserName above + +curl -X POST {URL}/keyfactorapi/certificatestoretypes -H "Content-Type: application/json" -H "x-keyfactor-requested-with: APIClient" -u {UserName}:{UserPassword} -d '{ + "Name": "RFPEM", + "ShortName": "RFPEM", + "Capability": "RFPEM", + "ServerRequired": true, + "BlueprintAllowed": false, + "CustomAliasAllowed": "Forbidden", + "PowerShell": false, + "PrivateKeyAllowed": "Optional", + "SupportedOperations": { + "Add": true, + "Create": true, + "Discovery": true, + "Enrollment": false, + "Remove": true + }, + "PasswordOptions": { + "Style": "Default", + "EntrySupported": false, + "StoreRequired": true + }, + "Properties": [ + { + "Name": "LinuxFilePermissionsOnStoreCreation", + "DisplayName": "Linux File Permissions on Store Creation", + "Required": false, + "DependsOn": "", + "Type": "String", 
+ "DefaultValue": "" + }, + { + "Name": "IsTrustStore", + "DisplayName": "Trust Store", + "Required": false, + "DependsOn": "", + "Type": "Bool", + "DefaultValue": false + }, + { + "Name": "IncludesChain", + "DisplayName": "Store Includes Chain", + "Required": false, + "DependsOn": "", + "Type": "Bool", + "DefaultValue": false + }, + { + "Name": "SeparatePrivateKeyFilePath", + "DisplayName": "Separate Private Key File Location", + "Required": false, + "DependsOn": "", + "Type": "String", + "DefaultValue": "" + } + ], + "EntryParameters": [] +}' diff --git a/Certificate Store Type CURL Scripts/PKCS12.curl b/Certificate Store Type CURL Scripts/PKCS12.curl new file mode 100644 index 00000000..cd495ff7 --- /dev/null +++ b/Certificate Store Type CURL Scripts/PKCS12.curl @@ -0,0 +1,40 @@ +###CURL script to create PKCS12 certificate store type + +###Replacement Variables - Manually replace these before running### +# {URL} - Base URL for your Keyfactor deployment +# {UserName} - User name with access to run Keyfactor APIs +# {UserPassword} - Password for the UserName above + +curl -X POST {URL}/keyfactorapi/certificatestoretypes -H "Content-Type: application/json" -H "x-keyfactor-requested-with: APIClient" -u {UserName}:{UserPassword} -d '{ + "Name": "RFPkcs12", + "ShortName": "RFPkcs12", + "Capability": "RFPkcs12", + "ServerRequired": true, + "BlueprintAllowed": false, + "CustomAliasAllowed": "Required", + "PowerShell": false, + "PrivateKeyAllowed": "Optional", + "SupportedOperations": { + "Add": true, + "Create": true, + "Discovery": true, + "Enrollment": false, + "Remove": true + }, + "PasswordOptions": { + "Style": "Default", + "EntrySupported": false, + "StoreRequired": true + }, + "Properties": [ + { + "Name": "LinuxFilePermissionsOnStoreCreation", + "DisplayName": "Linux File Permissions on Store Creation", + "Required": false, + "DependsOn": "", + "Type": "String", + "DefaultValue": "" + } + ], + "EntryParameters": [] +}' diff --git a/README.md b/README.md new file 
mode 100644 index 00000000..7f2aca17 --- /dev/null +++ b/README.md @@ -0,0 +1,267 @@ +# Remote File + +The Remote File Orchestrator allows for the remote management of file-based certificate stores. Discovery, Inventory, and Management functions are supported. The orchestrator performs operations by first converting the certificate store into a BouncyCastle PKCS12Store. + +#### Integration status: Production - Ready for use in production environments. + +## About the Keyfactor Universal Orchestrator Capability + +This repository contains a Universal Orchestrator Extension which is a plugin to the Keyfactor Universal Orchestrator. Within the Keyfactor Platform, Orchestrators are used to manage “certificate stores” — collections of certificates and roots of trust that are found within and used by various applications. + +The Universal Orchestrator is part of the Keyfactor software distribution and is available via the Keyfactor customer portal. For general instructions on installing Capabilities, see the “Keyfactor Command Orchestrator Installation and Configuration Guide” section of the Keyfactor documentation. For configuration details of this specific Capability, see below in this readme. + +The Universal Orchestrator is the successor to the Windows Orchestrator. This Capability plugin only works with the Universal Orchestrator and does not work with the Windows Orchestrator. + +--- + + + + +## Platform Specific Notes + +The Keyfactor Universal Orchestrator may be installed on either Windows or Linux based platforms. The certificate operations supported by a capability may vary based what platform the capability is installed on. The table below indicates what capabilities are supported based on which platform the encompassing Universal Orchestrator is running. 
+| Operation | Win | Linux | +|-----|-----|------| +|Supports Management Add|✓ |✓ | +|Supports Management Remove|✓ |✓ | +|Supports Create Store|✓ |✓ | +|Supports Discovery|✓ |✓ | +|Supports Reenrollment| | | +|Supports Inventory|✓ |✓ | + + + +--- + + +## Overview +The Remote File Orchestrator Extension is a multi-purpose integration that can remotely manage a variety of file-based certificate stores and can easily be extended to manage others. The certificate store types that can be managed in the current version are: +- Java Keystores of type JKS +- PKCS12 files, including, but not limited to, Java keystores of type PKCS12 +- PEM files + +While the Keyfactor Universal Orchestrator (UO) can be installed on either Windows or Linux; likewise, the Remote File Orchestrator Extension can be used to manage certificate stores residing on both Windows and Linux servers. The supported configurations of Universal Orchestrator hosts and managed orchestrated servers are shown below: + +| | UO Installed on Windows | UO Installed on Linux | +|-----|-----|------| +|Orchestrated Server on remote Windows server|✓ | | +|Orchestrated Server on remote Linux server|✓ |✓ | +|Orchestrated Server on same server as orchestrator service (Agent)|✓ |✓ | + +This orchestrator extension makes use of an SSH connection to communicate remotely with certificate stores hosted on Linux servers and WinRM to communicate with certificate stores hosted on Windows servers. +  +  +## Versioning + +The version number of the Remote File Orchestrator Extension can be verified by right clicking on the RemoteFile.dll file in the Extensions/RemoteFile installation folder, selecting Properties, and then clicking on the Details tab. +  +  +## Keyfactor Version Supported + +The Remote File Orchestrator Extension has been tested against Keyfactor Universal Orchestrator version 9.5, but should work against earlier or later versions of the Keyfactor Universal Orchestrator. 
+  +  +## Security Considerations + +**For Linux orchestrated servers:** +1. The Remote File Orchestrator Extension makes use of a few common Linux commands when managing stores on Linux servers. If the credentials you will be connecting with need elevated access to run these commands, you must set up the user id as a sudoer with no password necessary and set the config.json "UseSudo" value to "Y" (See "Config File Setup" later in this README for more information on setting up the config.json file). The full list of these commands below: + * echo + * find + * cp + * rm + * chown + * install + +2. The Remote File Orchestrator Extension makes use of SFTP and/or SCP to transfer files to and from the orchestrated server. SFTP/SCP cannot make use of sudo, so all folders containing certificate stores will need to allow SFTP/SCP file transfer. If this is not possible, set the values in the config.json appropriately to use an alternative upload/download folder that does allow SFTP/SCP file transfer (See "Config File Setup" later in this README regarding the config.json file). + +**For Windows orchestrated servers:** +1. Make sure that WinRM is set up on the orchestrated server and that the WinRM port is part of the certificate store path when creating a new certificate store in Keyfactor Command (See "Creating Certificate Stores" later in this README). + +2. When creating/configuring a certificate store in Keyfactor Command, you will see a "Change Credentials" link after entering in the destination client machine (IP or DNS). This link **must** be clicked on to present the credentials dialog. However, it is not required that you enter separate credentials. Simply click SAVE in the resulting dialog without entering in credentials to use the credentials that the Keyfactor Orchestrator Service is running under. Alternatively, you may enter separate credentials into this dialog and use those to connect to the orchestrated server. 
+ +**SSH Key-Based Authentication** +1. When creating a Keyfactor certificate store for the remote file orchestrator extension (see "Creating Certificate Stores" later in this README), you may supply either a user id and password for the certificate store credentials (directly or through one of Keyfactor Command's PAM integrations), or a user id and SSH private key. Both PKCS#1 (BEGIN RSA PRIVATE KEY) and PKCS#8 (BEGIN PRIVATE KEY) formats are supported for the SSH private key. If using the normal Keyfactor Command credentials dialog without PAM integration, just copy and paste the full SSH private key into the Password textbox. +  +  +## Remote File Orchestrator Extension Installation +1. Create the certificate store types you wish to manage. Please refer to the individual sections devoted to each supported store type under "Certificate Store Types" later in this README. +2. Stop the Keyfactor Universal Orchestrator Service for the orchestrator you plan to install this extension to run on. +3. In the Keyfactor Orchestrator installation folder (by convention usually C:\Program Files\Keyfactor\Keyfactor Orchestrator), find the "Extensions" folder. Underneath that, create a new folder named "RemoteFile". You may choose to use a different name if you wish. +4. Download the latest version of the RemoteFile orchestrator extension from [GitHub](https://github.com/Keyfactor/remote-file-orchestrator). Click on the "Latest" release link on the right hand side of the main page and download the first zip file. +5. Copy the contents of the download installation zip file to the folder created in Step 3. +6. 
(Optional) If you decide to create one or more certificate store types with short names different than the suggested values (please see the individual certificate store type sections in "Certificate Store Types" later in this README for more information regarding certificate store types), edit the manifest.json file in the folder you created in step 3, and modify each "ShortName" in each "Certstores.{ShortName}.{Operation}" line with the ShortName you used to create the respective certificate store type. If you created it with the suggested values, this step can be skipped. +7. Modify the config.json file (See the "Configuration File Setup" section later in this README) +8. Start the Keyfactor Universal Orchestrator Service. +  +  +## Configuration File Setup + +The Remote File Orchestrator Extension uses a JSON configuration file. It is located in the {Keyfactor Orchestrator Installation Folder}\Extensions\RemoteFile. None of the values are required, and a description of each follows below: +{ + "UseSudo": "N", + "CreateStoreIfMissing": "N", + "UseNegotiate": "N", + "SeparateUploadFilePath": "", + "FileTransferProtocol": "SCP", + "DefaultLinuxPermissionsOnStoreCreation": "600" +} + +**UseSudo** (Applicable for Linux orchestrated servers only) - Y/N - Determines whether to prefix certain Linux command with "sudo". This can be very helpful in ensuring that the user id running commands over an ssh connection uses "least permissions necessary" to process each task. Setting this value to "Y" will prefix all Linux commands with "sudo" with the expectation that the command being executed on the orchestrated Linux server will look in the sudoers file to determine whether the logged in ID has elevated permissions for that specific command. For Windows orchestrated servers, this setting has no effect. Setting this value to "N" will result in "sudo" not being added to Linux commands. **Default value if missing - N**. 
+**CreateStoreOnAddIfMissing** - Y/N - Determines, during a Management-Add job, if a certificate store should be created if it does not already exist. If set to "N", and the store referenced in the Management-Add job is not found, the job will return an error with a message stating that the store does not exist. If set to "Y", the store will be created and the certificate added to the certificate store. **Default value if missing - N**. +**UseNegotiateAuth** (Applicable for Windows orchestrated servers only) – Y/N - Determines if WinRM should use Negotiate (Y) when connecting to the remote server. **Default Value if missing - N**. +**SeparateUploadFilePath** (Applicable for Linux managed servers only) – Set this to the path you wish to use as the location on the orchestrated server to upload/download and later remove temporary work files when processing jobs. If set to "" or not provided, the location of the certificate store itself will be used. File transfer itself is performed using SCP or SFTP protocols (see FileTransferProtocol setting). **Default Value if missing - blank**. +**FileTransferProtocol** (Applicable for Linux orchestrated servers only) - SCP/SFTP/Both - Determines the protocol to use when uploading/downloading files while processing a job. Valid values are: SCP - uses SCP, SFTP - uses SFTP, or Both - will attempt to use SCP first, and if that does not work, will attempt the file transfer via SFTP. **Default Value if missing - SCP**. +**DefaultLinuxPermissionsOnStoreCreation** (Applicable for Linux managed servers only) - Value must be 3 digits all between 0-7. The Linux file permissions that will be set on a new certificate store created via a Management Create job or a Management Add job where CreateStoreOnAddIfMissing is set to "Y". 
This value will be used for all certificate stores managed by this orchestrator instance unless overridden by the optional "Linux File Permissions on Store Creation" custom parameter setting on a specific certificate store (See the "Certificate Store Types Supported" section later in this README). **Default Value if missing - 600**. +  +  +## Certificate Store Types + +When setting up the certificate store types you wish the Remote File Orchestrator Extension to manage, there are some common settings that will be the same for all supported types. To create a new Certificate Store Type in Keyfactor Command, first click on settings (the gear icon on the top right) => Certificate Store Types => Add. Alternatively, there are CURL scripts for all of the currently implemented certificate store types in the Certificate Store Type CURL Scripts folder in this repo if you wish to automate the creation of the desired store types. + +**Common Values:** +*Basic Tab:* +- **Name** – Required. The display name you wish to use for the new Certificate Store Type. +- **ShortName** - Required. See specific certificate store type instructions below. +- **Custom Capability** - Unchecked +- **Supported Job Types** - Inventory, Add, Remove, Create, and Discovery should all be checked. +- **Needs Server** - Checked +- **Blueprint Allowed** - Checked if you wish to make use of blueprinting. Please refer to the Keyfactor Command Reference Guide for more details on this feature. +- **Uses PowerShell** - Unchecked +- **Requires Store Password** - Checked. NOTE: This does not require that a certificate store have a password, but merely ensures that a user who creates a Keyfactor Command Certificate Store MUST click the Store Password button and either enter a password or check No Password. Certificate stores with no passwords are still possible for certain certificate store types when checking this option. +- **Supports Entry Password** - Unchecked. 
+ +*Advanced Tab:* +- **Store Path Type** - Freeform +- **Supports Custom Alias** - See specific certificate store type instructions below. +- **Private Key Handling** - See specific certificate store type instructions below +- **PFX Password Style** - Default + +*Custom Fields Tab:* +- **Name:** linuxFilePermissionsOnStoreCreation, **Display Name:** Linux File Permissions on Store Creation, **Type:** String, **Default Value:** none. This custom field is **not required**. If not present, value reverts back to DefaultLinuxPermissionsOnStoreCreation setting in config.json (see Configuration File Setup section above). This value, applicable to certificate stores hosted on Linux orchestrated servers only, must be 3 digits all between 0-7. This represents the Linux file permissions that will be set for this certificate store if created via a Management Create job or a Management Add job where the config.json option CreateStoreOnAddIsMissing is set to "Y". + +Entry Parameters Tab: +- See specific certificate store type instructions below + +  +  +************************************** +**RFPkcs12 Certificate Store Type** +************************************** + +The RFPkcs12 store type can be used to manage any PKCS#12 compliant file format INCLUDING java keystores of type PKCS12. + +Use cases supported: +1. One-to-many trust entries - A single certificate without a private key in a certificate store. Each certificate identified with a custom alias or certificate thumbprint. +2. One-to-many key entries - One-to-many certificates with private keys and optionally the full certificate chain. Each certificate identified with a custom alias or certificate thumbprint. +3. A mix of trust and key entries. + +**Specific Certificate Store Type Values** +*Basic Tab:* +- **Short Name** – Required. Suggested value - **RFPkcs12**. 
If you choose to use a different value you must make the corresponding modification to the manifest.json file (see "Remote File Orchestrator Extension Installation", step 6 above). + +*Advanced Tab:* +- **Supports Custom Alias** - Required. +- **Private Key Handling** - Optional. + +*Custom Fields Tab:* +- no additional custom fields/parameters + +Entry Parameters Tab: +- no additional entry parameters + +  +CURL script to automate certificate store type creation can be found [here](https://github.com/Keyfactor/remote-file-orchestrator/blob/initial-version/Certificate%20Store%20Type%20CURL%20Scripts/PKCS12.curl) + +  +  +************************************** +**RFJKS Certificate Store Type** +************************************** + +The RFJKS store type can be used to manage java keystores of type JKS. **PLEASE NOTE:** Java keystores of type PKCS12 **_cannot_** be managed by the RFJKS type. You **_must_** use RFPkcs12. + +Use cases supported: +1. One-to-many trust entries - A single certificate without a private key in a certificate store. Each certificate identified with a custom alias or certificate thumbprint. +2. One-to-many key entries - One-to-many certificates with private keys and optionally the full certificate chain. Each certificate identified with a custom alias or certificate thumbprint. +3. A mix of trust and key entries. + +**Specific Certificate Store Type Values** +*Basic Tab:* +- **Short Name** – Required. Suggested value - **RFJKS**. If you choose to use a different value you must make the corresponding modification to the manifest.json file (see "Remote File Orchestrator Extension Installation", step 6 above). + +*Advanced Tab:* +- **Supports Custom Alias** - Required. +- **Private Key Handling** - Optional. 
+ +*Custom Fields Tab:* +- no additional custom fields/parameters + +Entry Parameters Tab: +- no additional entry parameters + +  +CURL script to automate certificate store type creation can be found [here](https://github.com/Keyfactor/remote-file-orchestrator/blob/initial-version/Certificate%20Store%20Type%20CURL%20Scripts/JKS.curl) + +  +  +************************************** +**RFPEM Certificate Store Type** +************************************** + +The RFPEM store type can be used to manage PEM encoded files. + +Use cases supported: +1. Trust stores - A file with one-to-many certificates (no private keys, no certificate chains). +2. Single certificate stores with private key in the file. +3. Single certificate stores with certificate chain and private key in the file. +4. Single certificate stores with private key in an external file. +5. Single certificate stores with certificate chain in the file and private key in an external file. + +**Specific Certificate Store Type Values** +*Basic Tab:* +- **Short Name** – Required. Suggested value - **RFPEM**. If you choose to use a different value you must make the corresponding modification to the manifest.json file (see "Remote File Orchestrator Extension Installation", step 6 above). + +*Advanced Tab:* +- **Supports Custom Alias** - Forbidden. +- **Private Key Handling** - Optional. + +*Custom Fields Tab:* +- **Name:** IsTrustStore, **Display Name:** Trust Store, **Type:** Bool, **Default Value:** false. This custom field is **not required**. Default value if not present is 'false'. If 'true', this store will be identified as a trust store. Any certificates attempting to be added via a Management-Add job that contain a private key will raise an error with an accompanying message. Multiple certificates may be added to the store in this use case. If set to 'false', this store can only contain a single certificate with chain and private key. 
Management-Add jobs attempting to add a certificate without a private key to a store marked as IsTrustStore = 'false' will raise an error with an accompanying message. +- **Name:** IncludesChain, **Display Name:** Store Includes Chain, **Type:** Bool, **Default Value:** false. This custom field is **not required**. Default value if not present is 'false'. If 'true' the full certificate chain, if sent by Keyfactor Command, will be stored in the file. The order of appearance is always assumed to be 1) end entity certificate, 2) issuing CA certificate, and 3) root certificate. If additional CA tiers are applicable, the order will be end entity certificate up to the root CA certificate. if set to 'false', only the end entity certificate and private key will be stored in this store. This setting is only valid when IsTrustStore = false. +- **Name:** SeparatePrivateKeyFilePath, **Display Name:** Separate Private Key File Location, **Type:** String, **Default Value:** empty. This custom field is **not required**. If empty, or not provided, it will be assumed that the private key for the certificate stored in this file will be inside the same file as the certificate. If the full path AND file name is put here, that location will be used to store the private key as an external file. This setting is only valid when IsTrustStore = false. + +Entry Parameters Tab: +- no additional entry parameters + +  +CURL script to automate certificate store type creation can be found [here](https://github.com/Keyfactor/remote-file-orchestrator/blob/initial-version/Certificate%20Store%20Type%20CURL%20Scripts/PEM.curl) +  +  +## Creating Certificate Stores + +Please refer to the Keyfactor Command Reference Guide for information on creating certificate stores in Keyfactor Command. However, there are two fields that are important to highlight here - Client Machine and Store Path. 
For Linux orchestrated servers, "Client Machine" should be the DNS or IP address of the remote orchestrated server while "Store Path" is the full path and file name of the file based store, beginning with a forward slash (/). For Windows orchestrated servers, "Client Machine" should be of the format {protocol}://{dns-or-ip}:{port} where {protocol} is either http or https, {dns-or-ip} is the DNS or IP address of the remote orchestrated server, and {port} is the port where WinRM is listening, by convention usually 5985 for http and 5986 for https. "Store Path" is the full path and file name of the file based store, beginning with a drive letter (i.e. c:\). +  +  +## Developer Notes + +The Remote File Orchestrator Extension is meant to be extended to be used for other file based certificate store types than the ones referenced above. The advantage to extending this integration rather than creating a new one is that the configuration, remoting, and Inventory/Management/Discovery logic is already written. The developer needs to only implement a few classes and write code to convert the desired file based store to a common format. This section describes the steps necessary to add additional store/file types. Please note that familiarity with the [.Net Core BouncyCastle cryptography library](https://github.com/bcgit/bc-csharp) is a prerequisite for adding additional supported file/store types. + +Steps to create a new supported file based certificate store type: + +1. Clone this repository from GitHub +2. Open the .net core solution in the IDE of your choice +3. Under the ImplementationStoreTypes folder, create a new folder named for the new certificate store type +4. Create a new class (with namespace of Keyfactor.Extensions.Orchestrator.RemoteFile.{NewType}) in the new folder that will implement ICertificateStoreSerializer. By convention, {StoreTypeName}CertificateSerializer would be a good choice for the class name. 
This interface requires you to implement two methods: DesrializeRemoteCertificateStore and SerializeRemoteCertificateStore. The first method will be called passing in a byte array containing the contents of file based store you are managing. The developer will need to convert that to an Org.BouncyCastle.Pkcs.Pkcs12Store class and return it. The second method takes in an Org.BouncyCastle.Pkcs.Pkcs12Store and converts it to a collection of custom file representations, List. This is where the majority of the development will be done. +5. Create an Inventory.cs class (with namespace of Keyfactor.Extensions.Orchestrator.RemoteFile.{NewType}) under the new folder and have it inherit InventoryBase. Override the internal GetCertificateStoreSerializer() method with a one line implementation returning a new instantiation of the class created in step 4. +6. Create a Management.cs class (with namespace of Keyfactor.Extensions.Orchestrator.RemoteFile.{NewType}) under the new folder and have it inherit ManagementBase. Override the internal GetCertificateStoreSerializer() method with a one line implementation returning a new instantiation of the class created in step 4. +7. Modify the manifest.json file to add three new sections (for Inventory, Management, and Discovery). Make sure for each, the "NewType" in Certstores.{NewType}.{Operation}, matches what you will use for the certificate store type short name in Keyfactor Command. On the "TypeFullName" line for all three sections, make sure the namespace matches what you used for your new classes. Note that the namespace for Discovery uses a common class for all supported types. Discovery is a common implementation for all supported store types. +8. After compiling, move all compiled files, including the config.json and manifest.json to {Keyfactor Orchestrator Installation Folder}\Extensions\RemoteFile. +9. Create the certificate store type in Keyfactor Command +10. 
Add a new CURL script to build the proper Keyfactor Command certificate store type and place it under "Certificate Store Type CURL Scripts". The name of the file should match the ShortName you are using for the new store type. +11. Update the documenation in readme_source.md by adding a new section under "Certificate Store Types" for this new supported file based store type. Include a pointer to the CURL script created in step 10. +  +  +## License +[Apache](https://apache.org/licenses/LICENSE-2.0) + + diff --git a/RemoteFile.sln b/RemoteFile.sln new file mode 100644 index 00000000..cc0e53ff --- /dev/null +++ b/RemoteFile.sln @@ -0,0 +1,25 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.31702.278 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RemoteFile", "RemoteFile\RemoteFile.csproj", "{A006BFAB-20F7-4F42-8B5F-591268ACE836}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {A006BFAB-20F7-4F42-8B5F-591268ACE836}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A006BFAB-20F7-4F42-8B5F-591268ACE836}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A006BFAB-20F7-4F42-8B5F-591268ACE836}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A006BFAB-20F7-4F42-8B5F-591268ACE836}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {8F3245C7-FCC9-4666-99E0-F8D63BBE8373} + EndGlobalSection +EndGlobal diff --git a/RemoteFile/ApplicationSettings.cs b/RemoteFile/ApplicationSettings.cs new file mode 100644 index 00000000..a39f043c --- /dev/null +++ b/RemoteFile/ApplicationSettings.cs @@ -0,0 +1,107 @@ +using System; +using 
System.Collections.Generic; +using System.IO; + +using Newtonsoft.Json; +using Microsoft.Extensions.Logging; +using Keyfactor.Logging; + + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + class ApplicationSettings + { + public enum FileTransferProtocolEnum + { + SCP, + SFTP, + Both + } + + private const string DEFAULT_LINUX_PERMISSION_SETTING = "600"; + + private static Dictionary configuration; + + public static bool UseSudo { get { return configuration.ContainsKey("UseSudo") ? configuration["UseSudo"]?.ToUpper() == "Y" : false; } } + public static bool CreateStoreIfMissing { get { return configuration.ContainsKey("CreateStoreIfMissing") ? configuration["CreateStoreIfMissing"]?.ToUpper() == "Y" : false; } } + public static bool UseNegotiate { get { return configuration.ContainsKey("UseNegotiate") ? configuration["UseNegotiate"]?.ToUpper() == "Y" : false; } } + public static string SeparateUploadFilePath { get { return configuration.ContainsKey("SeparateUploadFilePath") ? AddTrailingSlash(configuration["SeparateUploadFilePath"]) : string.Empty; } } + public static string DefaultLinuxPermissionsOnStoreCreation { get { return configuration.ContainsKey("DefaultLinuxPermissionsOnStoreCreation") ? configuration["DefaultLinuxPermissionsOnStoreCreation"] : DEFAULT_LINUX_PERMISSION_SETTING; } } + public static FileTransferProtocolEnum FileTransferProtocol + { + get + { + string protocolNames = string.Empty; + foreach (string protocolName in Enum.GetNames(typeof(FileTransferProtocolEnum))) + { + protocolNames += protocolName + ", "; + } + protocolNames = protocolNames.Substring(0, protocolNames.Length - 2); + + if (!Enum.TryParse(configuration["FileTransferProtocol"], out FileTransferProtocolEnum protocol)) + throw new RemoteFileException($"Invalid optional config.json FileTransferProtocol option of {configuration["FileTransferProtocol"]}. 
If present, must be one of these values: {protocolNames}."); + return protocol; + } + } + + public static void Initialize(string configLocation) + { + ILogger logger = LogHandler.GetClassLogger(); + logger.MethodEntry(LogLevel.Debug); + + configuration = new Dictionary(); + configLocation = $"{Path.GetDirectoryName(configLocation)}{Path.DirectorySeparatorChar}config.json"; + string configContents = string.Empty; + + if (!File.Exists(configLocation)) + { + logger.LogDebug("config.json missing. Default values used for configuration."); + return; + } + + using (StreamReader sr = new StreamReader(configLocation)) + { + configContents = sr.ReadToEnd(); + logger.LogDebug($"Raw config.json contents: {configContents}"); + } + + if (String.IsNullOrEmpty(configContents)) + { + logger.LogDebug("config.json exists but empty. Default values used for configuration."); + return; + } + + configuration = JsonConvert.DeserializeObject>(configContents); + ValidateConfiguration(logger); + + logger.LogDebug("Configuration Settings:"); + foreach(KeyValuePair keyValue in configuration) + { + logger.LogDebug($" {keyValue.Key}: {keyValue.Value}"); + } + + logger.MethodExit(LogLevel.Debug); + } + + private static void ValidateConfiguration(ILogger logger) + { + if (!configuration.ContainsKey("UseSudo") || (configuration["UseSudo"].ToUpper() != "Y" && configuration["UseSudo"].ToUpper() != "N")) + logger.LogDebug($"Missing or invalid configuration parameter - UseSudo. Will set to default value of 'False'"); + if (!configuration.ContainsKey("CreateStoreIfMissing") || (configuration["CreateStoreIfMissing"].ToUpper() != "Y" && configuration["CreateStoreIfMissing"].ToUpper() != "N")) + logger.LogDebug($"Missing or invalid configuration parameter - CreateStoreIfMissing. 
Will set to default value of 'False'"); + if (!configuration.ContainsKey("UseNegotiate") || (configuration["UseNegotiate"].ToUpper() != "Y" && configuration["UseNegotiate"].ToUpper() != "N")) + logger.LogDebug($"Missing or invalid configuration parameter - UseNegotiate. Will set to default value of 'False'"); + if (!configuration.ContainsKey("SeparateUploadFilePath")) + logger.LogDebug($"Missing configuration parameter - SeparateUploadFilePath. Will set to default value of ''"); + if (!configuration.ContainsKey("DefaultLinuxPermissionsOnStoreCreation")) + logger.LogDebug($"Missing configuration parameter - DefaultLinuxPermissionsOnStoreCreation. Will set to default value of '{DEFAULT_LINUX_PERMISSION_SETTING}'"); + if (!configuration.ContainsKey("FileTransferProtocol")) + logger.LogDebug($"Missing configuration parameter - FileTransferProtocol. Will set to default value of 'SCP'"); + } + + private static string AddTrailingSlash(string path) + { + return string.IsNullOrEmpty(path) ? path : path.Substring(path.Length - 1, 1) == @"/" ? path : path += @"/"; + } + } +} diff --git a/RemoteFile/Discovery.cs b/RemoteFile/Discovery.cs new file mode 100644 index 00000000..e26b0248 --- /dev/null +++ b/RemoteFile/Discovery.cs @@ -0,0 +1,89 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. +// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. 
+ +using System; +using System.Collections.Generic; +using System.Linq; + +using Keyfactor.Logging; +using Keyfactor.Orchestrators.Extensions; +using Keyfactor.Orchestrators.Common.Enums; + +using Microsoft.Extensions.Logging; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + public class Discovery: IDiscoveryJobExtension + { + public string ExtensionName => ""; + + public JobResult ProcessJob(DiscoveryJobConfiguration config, SubmitDiscoveryUpdate submitDiscovery) + { + ILogger logger = LogHandler.GetClassLogger(this.GetType()); + logger.LogDebug($"Begin {config.Capability} for job id {config.JobId}..."); + logger.LogDebug($"Server: { config.ClientMachine }"); + logger.LogDebug($"Job Properties:"); + foreach (KeyValuePair keyValue in config.JobProperties) + { + logger.LogDebug($" {keyValue.Key}: {keyValue.Value}"); + } + + string[] directoriesToSearch = config.JobProperties["dirs"].ToString().Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + string[] extensionsToSearch = config.JobProperties["extensions"].ToString().Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + string[] ignoredDirs = config.JobProperties["ignoreddirs"].ToString().Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + string[] filesTosearch = config.JobProperties["patterns"].ToString().Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + bool includeSymLinks = Convert.ToBoolean(config.JobProperties["symLinks"]); + + List locations = new List(); + + RemoteCertificateStore certificateStore = new RemoteCertificateStore(config.ClientMachine, config.ServerUsername, config.ServerPassword, directoriesToSearch[0].Substring(0, 1) == "/" ? 
RemoteCertificateStore.ServerTypeEnum.Linux : RemoteCertificateStore.ServerTypeEnum.Windows); + + try + { + ApplicationSettings.Initialize(this.GetType().Assembly.Location); + + if (directoriesToSearch.Length == 0) + throw new RemoteFileException("Blank or missing search directories for Discovery."); + if (extensionsToSearch.Length == 0) + throw new RemoteFileException("Blank or missing search extensions for Discovery."); + if (filesTosearch.Length == 0) + filesTosearch = new string[] { "*" }; + + locations = certificateStore.FindStores(directoriesToSearch, extensionsToSearch, filesTosearch, includeSymLinks); + foreach (string ignoredDir in ignoredDirs) + locations = locations.Where(p => !p.StartsWith(ignoredDir)).ToList(); + } + catch (Exception ex) + { + logger.LogError($"Exception for {config.Capability}: {RemoteFileException.FlattenExceptionMessages(ex, string.Empty)} for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Failure, JobHistoryId = config.JobHistoryId, FailureMessage = RemoteFileException.FlattenExceptionMessages(ex, $"Server {config.ClientMachine}:") }; + } + finally + { + certificateStore.Terminate(); + } + + try + { + logger.LogDebug($"Stores returned for {config.Capability}:"); + foreach (string location in locations) + { + logger.LogDebug($" {location}"); + } + submitDiscovery.Invoke(locations); + logger.LogDebug($"...End {config.Capability} job for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Success, JobHistoryId = config.JobHistoryId }; + } + catch (Exception ex) + { + string errorMessage = RemoteFileException.FlattenExceptionMessages(ex, string.Empty); + logger.LogError($"Exception returning store locations for {config.Capability}: {errorMessage} for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Failure, JobHistoryId = config.JobHistoryId, FailureMessage = $"Server {config.ClientMachine}: {errorMessage}" }; 
+ } + } + } +} \ No newline at end of file diff --git a/RemoteFile/ExceptionHandler.cs b/RemoteFile/ExceptionHandler.cs new file mode 100644 index 00000000..a8ba3929 --- /dev/null +++ b/RemoteFile/ExceptionHandler.cs @@ -0,0 +1,29 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. +// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +using System; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + class RemoteFileException : ApplicationException + { + public RemoteFileException(string message) : base(message) + { } + + public RemoteFileException(string message, Exception ex) : base(message, ex) + { } + + public static string FlattenExceptionMessages(Exception ex, string message) + { + message += ex.Message + Environment.NewLine; + if (ex.InnerException != null) + message = FlattenExceptionMessages(ex.InnerException, message); + + return message; + } + } +} diff --git a/RemoteFile/ICertificateStoreSerializer.cs b/RemoteFile/ICertificateStoreSerializer.cs new file mode 100644 index 00000000..c847ad81 --- /dev/null +++ b/RemoteFile/ICertificateStoreSerializer.cs @@ -0,0 +1,14 @@ +using Org.BouncyCastle.Pkcs; +using Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers; +using Keyfactor.Extensions.Orchestrator.RemoteFile.Models; +using System.Collections.Generic; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + interface ICertificateStoreSerializer + { + Pkcs12Store DeserializeRemoteCertificateStore(byte[] storeContents, string storePassword, string storeProperties, 
IRemoteHandler remoteHandler); + + List SerializeRemoteCertificateStore(Pkcs12Store certificateStore, string storePath, string storePassword, string storeProperties, IRemoteHandler remoteHandler); + } +} diff --git a/RemoteFile/ImplementedStoreTypes/JKS/Inventory.cs b/RemoteFile/ImplementedStoreTypes/JKS/Inventory.cs new file mode 100644 index 00000000..3334a794 --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/JKS/Inventory.cs @@ -0,0 +1,11 @@ + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.JKS +{ + public class Inventory : InventoryBase + { + internal override ICertificateStoreSerializer GetCertificateStoreSerializer() + { + return new JKSCertificateStoreSerializer(); + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/JKS/JKSCertificateStoreSerializer.cs b/RemoteFile/ImplementedStoreTypes/JKS/JKSCertificateStoreSerializer.cs new file mode 100644 index 00000000..55fba00b --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/JKS/JKSCertificateStoreSerializer.cs @@ -0,0 +1,113 @@ +using System.IO; +using System.Collections.Generic; +using Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers; +using Keyfactor.Extensions.Orchestrator.RemoteFile.Models; + +using Keyfactor.Logging; + +using Org.BouncyCastle.Pkcs; +using Org.BouncyCastle.Crypto; +using Org.BouncyCastle.X509; + +using Microsoft.Extensions.Logging; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.JKS +{ + class JKSCertificateStoreSerializer : ICertificateStoreSerializer + { + private ILogger logger; + + public JKSCertificateStoreSerializer() + { + logger = LogHandler.GetClassLogger(this.GetType()); + } + + public Pkcs12Store DeserializeRemoteCertificateStore(byte[] storeContents, string storePassword, string storeProperties, IRemoteHandler remoteHandler) + { + logger.MethodEntry(LogLevel.Debug); + + Pkcs12StoreBuilder storeBuilder = new Pkcs12StoreBuilder(); + Pkcs12Store pkcs12Store = storeBuilder.Build(); + Pkcs12Store pkcs12StoreNew = storeBuilder.Build(); + + JksStore 
jksStore = new JksStore(); + + using (MemoryStream ms = new MemoryStream(storeContents)) + { + jksStore.Load(ms, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray()); + } + + foreach(string alias in jksStore.Aliases) + { + if (jksStore.IsKeyEntry(alias)) + { + AsymmetricKeyParameter keyParam = jksStore.GetKey(alias, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray()); + AsymmetricKeyEntry keyEntry = new AsymmetricKeyEntry(keyParam); + + X509Certificate[] certificateChain = jksStore.GetCertificateChain(alias); + List certificateChainEntries = new List(); + foreach (X509Certificate certificate in certificateChain) + { + certificateChainEntries.Add(new X509CertificateEntry(certificate)); + } + + pkcs12Store.SetKeyEntry(alias, keyEntry, certificateChainEntries.ToArray()); + } + else + { + pkcs12Store.SetCertificateEntry(alias, new X509CertificateEntry(jksStore.GetCertificate(alias))); + } + } + + // Second Pkcs12Store necessary because of an obscure BC bug where creating a Pkcs12Store without .Load (code above using "Set" methods only) does not set all internal hashtables necessary to avoid an error later + // when processing store. + MemoryStream ms2 = new MemoryStream(); + pkcs12Store.Save(ms2, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray(), new Org.BouncyCastle.Security.SecureRandom()); + ms2.Position = 0; + + pkcs12StoreNew.Load(ms2, string.IsNullOrEmpty(storePassword) ? 
new char[0] : storePassword.ToCharArray()); + + logger.MethodExit(LogLevel.Debug); + return pkcs12StoreNew; + } + + public List SerializeRemoteCertificateStore(Pkcs12Store certificateStore, string storePath, string storePassword, string storeProperties, IRemoteHandler remoteHandler) + { + logger.MethodEntry(LogLevel.Debug); + + JksStore jksStore = new JksStore(); + + foreach(string alias in certificateStore.Aliases) + { + if (certificateStore.IsKeyEntry(alias)) + { + AsymmetricKeyEntry keyEntry = certificateStore.GetKey(alias); + X509CertificateEntry[] certificateChain = certificateStore.GetCertificateChain(alias); + + List certificates = new List(); + foreach(X509CertificateEntry certificateEntry in certificateChain) + { + certificates.Add(certificateEntry.Certificate); + } + + jksStore.SetKeyEntry(alias, keyEntry.Key, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray(), certificates.ToArray()); + } + else + { + jksStore.SetCertificateEntry(alias, certificateStore.GetCertificate(alias).Certificate); + } + } + + using (MemoryStream outStream = new MemoryStream()) + { + jksStore.Save(outStream, string.IsNullOrEmpty(storePassword) ? 
new char[0] : storePassword.ToCharArray()); + + List storeInfo = new List(); + storeInfo.Add(new SerializedStoreInfo() { FilePath = storePath, Contents = outStream.ToArray() }); + + logger.MethodExit(LogLevel.Debug); + return storeInfo; + } + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/JKS/JksStore.cs b/RemoteFile/ImplementedStoreTypes/JKS/JksStore.cs new file mode 100644 index 00000000..2877134c --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/JKS/JksStore.cs @@ -0,0 +1,613 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +using Org.BouncyCastle.Asn1; +using Org.BouncyCastle.Asn1.Pkcs; +using Org.BouncyCastle.Asn1.X509; +using Org.BouncyCastle.Crypto; +using Org.BouncyCastle.Crypto.IO; +using Org.BouncyCastle.Pkcs; +using Org.BouncyCastle.Security; +using Org.BouncyCastle.Utilities; +using Org.BouncyCastle.Utilities.Date; +using Org.BouncyCastle.Utilities.IO; +using Org.BouncyCastle.X509; + +using Keyfactor.PKI.X509; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.JKS +{ + public class JksStore + { + private static readonly int Magic = unchecked((int)0xFEEDFEED); + + private static readonly AlgorithmIdentifier JksObfuscationAlg = new AlgorithmIdentifier( + new DerObjectIdentifier("1.3.6.1.4.1.42.2.17.1.1"), DerNull.Instance); + + private readonly Dictionary m_certificateEntries = + new Dictionary(StringComparer.OrdinalIgnoreCase); + private readonly Dictionary m_keyEntries = + new Dictionary(StringComparer.OrdinalIgnoreCase); + + public JksStore() + { + } + + /// + public bool Probe(Stream stream) + { + using (var br = new BinaryReader(stream)) + try + { + return Magic == ReadInt32(br); + } + catch (EndOfStreamException) + { + return false; + } + } + + /// + public AsymmetricKeyParameter GetKey(string alias, char[] password) + { + if (alias == null) + throw new ArgumentNullException(nameof(alias)); + if (password == null) + throw new ArgumentNullException(nameof(password)); + + if 
(!m_keyEntries.TryGetValue(alias, out JksKeyEntry keyEntry)) + return null; + + if (!JksObfuscationAlg.Equals(keyEntry.keyData.EncryptionAlgorithm)) + throw new IOException("unknown encryption algorithm"); + + byte[] encryptedData = keyEntry.keyData.GetEncryptedData(); + + // key length is encryptedData - salt - checksum + int pkcs8Len = encryptedData.Length - 40; + + IDigest digest = DigestUtilities.GetDigest("SHA-1"); + + // key decryption + byte[] keyStream = CalculateKeyStream(digest, password, encryptedData, pkcs8Len); + byte[] pkcs8Key = new byte[pkcs8Len]; + for (int i = 0; i < pkcs8Len; ++i) + { + pkcs8Key[i] = (byte)(encryptedData[20 + i] ^ keyStream[i]); + } + Array.Clear(keyStream, 0, keyStream.Length); + + // integrity check + byte[] checksum = GetKeyChecksum(digest, password, pkcs8Key); + + if (!Arrays.ConstantTimeAreEqual(20, encryptedData, pkcs8Len + 20, checksum, 0)) + throw new IOException("cannot recover key"); + + return PrivateKeyFactory.CreateKey(pkcs8Key); + } + + private byte[] GetKeyChecksum(IDigest digest, char[] password, byte[] pkcs8Key) + { + AddPassword(digest, password); + + return DigestUtilities.DoFinal(digest, pkcs8Key); + } + + private byte[] CalculateKeyStream(IDigest digest, char[] password, byte[] salt, int count) + { + byte[] keyStream = new byte[count]; + byte[] hash = Arrays.CopyOf(salt, 20); + + int index = 0; + while (index < count) + { + AddPassword(digest, password); + + digest.BlockUpdate(hash, 0, hash.Length); + digest.DoFinal(hash, 0); + + int length = System.Math.Min(hash.Length, keyStream.Length - index); + Array.Copy(hash, 0, keyStream, index, length); + index += length; + } + + return keyStream; + } + + public X509Certificate[] GetCertificateChain(string alias) + { + if (m_keyEntries.TryGetValue(alias, out var keyEntry)) + return CloneChain(keyEntry.chain); + + return null; + } + + public X509Certificate GetCertificate(string alias) + { + if (m_certificateEntries.TryGetValue(alias, out var certEntry)) + return 
certEntry.cert; + + if (m_keyEntries.TryGetValue(alias, out var keyEntry)) + return keyEntry.chain?[0]; + + return null; + } + + public DateTime? GetCreationDate(string alias) + { + if (m_certificateEntries.TryGetValue(alias, out var certEntry)) + return certEntry.date; + + if (m_keyEntries.TryGetValue(alias, out var keyEntry)) + return keyEntry.date; + + return null; + } + + /// + public void SetKeyEntry(string alias, AsymmetricKeyParameter key, char[] password, X509Certificate[] chain) + { + alias = ConvertAlias(alias); + + if (ContainsAlias(alias)) + throw new IOException("alias [" + alias + "] already in use"); + + byte[] pkcs8Key = PrivateKeyInfoFactory.CreatePrivateKeyInfo(key).GetEncoded(); + byte[] protectedKey = new byte[pkcs8Key.Length + 40]; + + SecureRandom rnd = new SecureRandom(); + rnd.NextBytes(protectedKey, 0, 20); + + IDigest digest = DigestUtilities.GetDigest("SHA-1"); + + byte[] checksum = GetKeyChecksum(digest, password, pkcs8Key); + Array.Copy(checksum, 0, protectedKey, 20 + pkcs8Key.Length, 20); + + byte[] keyStream = CalculateKeyStream(digest, password, protectedKey, pkcs8Key.Length); + for (int i = 0; i != keyStream.Length; i++) + { + protectedKey[20 + i] = (byte)(pkcs8Key[i] ^ keyStream[i]); + } + Array.Clear(keyStream, 0, keyStream.Length); + + try + { + var epki = new EncryptedPrivateKeyInfo(JksObfuscationAlg, protectedKey); + m_keyEntries.Add(alias, new JksKeyEntry(DateTime.UtcNow, epki.GetEncoded(), CloneChain(chain))); + } + catch (Exception e) + { + throw new IOException("unable to encode encrypted private key", e); + } + } + + /// + public void SetKeyEntry(string alias, byte[] key, X509Certificate[] chain) + { + alias = ConvertAlias(alias); + + if (ContainsAlias(alias)) + throw new IOException("alias [" + alias + "] already in use"); + + m_keyEntries.Add(alias, new JksKeyEntry(DateTime.UtcNow, key, CloneChain(chain))); + } + + /// + public void SetCertificateEntry(string alias, X509Certificate cert) + { + alias = 
ConvertAlias(alias); + + if (ContainsAlias(alias)) + throw new IOException("alias [" + alias + "] already in use"); + + m_certificateEntries.Add(alias, new JksTrustedCertEntry(DateTime.UtcNow, cert)); + } + + public void DeleteEntry(string alias) + { + if (!m_keyEntries.Remove(alias)) + { + m_certificateEntries.Remove(alias); + } + } + + public IEnumerable Aliases + { + get + { + var aliases = new HashSet(m_certificateEntries.Keys); + aliases.UnionWith(m_keyEntries.Keys); + // FIXME + //return CollectionUtilities.Proxy(aliases); + return aliases; + } + } + + public bool ContainsAlias(string alias) + { + return IsCertificateEntry(alias) || IsKeyEntry(alias); + } + + public int Count + { + get { return m_certificateEntries.Count + m_keyEntries.Count; } + } + + public bool IsKeyEntry(string alias) + { + return m_keyEntries.ContainsKey(alias); + } + + public bool IsCertificateEntry(string alias) + { + return m_certificateEntries.ContainsKey(alias); + } + + public string GetCertificateAlias(X509Certificate cert) + { + foreach (var entry in m_certificateEntries) + { + if (entry.Value.cert.Equals(cert)) + return entry.Key; + } + return null; + } + + /// + public void Save(Stream stream, char[] password) + { + if (stream == null) + throw new ArgumentNullException(nameof(stream)); + if (password == null) + throw new ArgumentNullException(nameof(password)); + + IDigest checksumDigest = CreateChecksumDigest(password); + BinaryWriter bw = new BinaryWriter(new DigestStream(stream, null, checksumDigest)); + + WriteInt32(bw, Magic); + WriteInt32(bw, 2); + + WriteInt32(bw, Count); + + foreach (var entry in m_keyEntries) + { + string alias = entry.Key; + JksKeyEntry keyEntry = entry.Value; + + WriteInt32(bw, 1); + WriteUtf(bw, alias); + WriteDateTime(bw, keyEntry.date); + WriteBufferWithLength(bw, keyEntry.keyData.GetEncoded()); + + X509Certificate[] chain = keyEntry.chain; + int chainLength = chain == null ? 
0 : chain.Length; + WriteInt32(bw, chainLength); + for (int i = 0; i < chainLength; ++i) + { + WriteTypedCertificate(bw, chain[i]); + } + } + + foreach (var entry in m_certificateEntries) + { + string alias = entry.Key; + JksTrustedCertEntry certEntry = entry.Value; + + WriteInt32(bw, 2); + WriteUtf(bw, alias); + WriteDateTime(bw, certEntry.date); + WriteTypedCertificate(bw, certEntry.cert); + } + + byte[] checksum = DigestUtilities.DoFinal(checksumDigest); + bw.Write(checksum); + bw.Flush(); + } + + /// + public void Load(Stream stream, char[] password) + { + if (stream == null) + throw new ArgumentNullException(nameof(stream)); + + m_certificateEntries.Clear(); + m_keyEntries.Clear(); + + ErasableByteStream storeStream = ValidateStream(stream, password); + try + { + BinaryReader dIn = new BinaryReader(storeStream); + + int magic = ReadInt32(dIn); + int storeVersion = ReadInt32(dIn); + + if (!(magic == Magic && (storeVersion == 1 || storeVersion == 2))) + throw new IOException("Invalid keystore format"); + + int numEntries = ReadInt32(dIn); + + for (int t = 0; t < numEntries; t++) + { + int tag = ReadInt32(dIn); + + switch (tag) + { + case 1: // keys + { + string alias = ReadUtf(dIn); + DateTime date = ReadDateTime(dIn); + + // encrypted key data + byte[] keyData = ReadBufferWithLength(dIn); + + // certificate chain + int chainLength = ReadInt32(dIn); + X509Certificate[] chain = null; + if (chainLength > 0) + { + var certs = new List(System.Math.Min(10, chainLength)); + for (int certNo = 0; certNo != chainLength; certNo++) + { + certs.Add(ReadTypedCertificate(dIn, storeVersion)); + } + chain = certs.ToArray(); + } + m_keyEntries.Add(alias, new JksKeyEntry(date, keyData, chain)); + break; + } + case 2: // certificate + { + string alias = ReadUtf(dIn); + DateTime date = ReadDateTime(dIn); + + X509Certificate cert = ReadTypedCertificate(dIn, storeVersion); + + m_certificateEntries.Add(alias, new JksTrustedCertEntry(date, cert)); + break; + } + default: + throw new 
IOException("unable to discern entry type"); + } + } + + if (storeStream.Position != storeStream.Length) + throw new IOException("password incorrect or store tampered with"); + } + finally + { + storeStream.Erase(); + } + } + + /* + * Validate password takes the checksum of the store and will either. + * 1. If password is null, load the store into memory, return the result. + * 2. If password is not null, load the store into memory, test the checksum, and if successful return + * a new input stream instance of the store. + * 3. Fail if there is a password and an invalid checksum. + * + * @param inputStream The input stream. + * @param password the password. + * @return Either the passed in input stream or a new input stream. + */ + /// + private ErasableByteStream ValidateStream(Stream inputStream, char[] password) + { + byte[] rawStore = Streams.ReadAll(inputStream); + int checksumPos = rawStore.Length - 20; + + if (password != null) + { + byte[] checksum = CalculateChecksum(password, rawStore, 0, checksumPos); + + if (!Arrays.ConstantTimeAreEqual(20, checksum, 0, rawStore, checksumPos)) + { + Array.Clear(rawStore, 0, rawStore.Length); + throw new IOException("password incorrect or store tampered with"); + } + } + + return new ErasableByteStream(rawStore, 0, checksumPos); + } + + private static void AddPassword(IDigest digest, char[] password) + { + // Encoding.BigEndianUnicode + for (int i = 0; i < password.Length; ++i) + { + digest.Update((byte)(password[i] >> 8)); + digest.Update((byte)password[i]); + } + } + + private static byte[] CalculateChecksum(char[] password, byte[] buffer, int offset, int length) + { + IDigest checksumDigest = CreateChecksumDigest(password); + checksumDigest.BlockUpdate(buffer, offset, length); + return DigestUtilities.DoFinal(checksumDigest); + } + + private static X509Certificate[] CloneChain(X509Certificate[] chain) + { + return (X509Certificate[])chain?.Clone(); + } + + private static string ConvertAlias(string alias) + { + return 
alias.ToLowerInvariant(); + } + + private static IDigest CreateChecksumDigest(char[] password) + { + IDigest digest = DigestUtilities.GetDigest("SHA-1"); + AddPassword(digest, password); + + // + // This "Mighty Aphrodite" string goes all the way back to the + // first java betas in the mid 90's, why who knows? But see + // https://cryptosense.com/mighty-aphrodite-dark-secrets-of-the-java-keystore/ + // + byte[] prefix = Encoding.UTF8.GetBytes("Mighty Aphrodite"); + digest.BlockUpdate(prefix, 0, prefix.Length); + return digest; + } + + private static byte[] ReadBufferWithLength(BinaryReader br) + { + int length = ReadInt32(br); + return br.ReadBytes(length); + } + + private static DateTime ReadDateTime(BinaryReader br) + { + DateTime unixMs = DateTimeUtilities.UnixMsToDateTime(Longs.ReverseBytes(br.ReadInt64())); + DateTime utc = new DateTime(unixMs.Ticks, DateTimeKind.Utc); + return utc; + } + + private static short ReadInt16(BinaryReader br) + { + short n = br.ReadInt16(); + n = (short)(((n & 0xFF) << 8) | ((n >> 8) & 0xFF)); + return n; + } + + private static int ReadInt32(BinaryReader br) + { + return Integers.ReverseBytes(br.ReadInt32()); + } + + private static X509Certificate ReadTypedCertificate(BinaryReader br, int storeVersion) + { + if (storeVersion == 2) + { + string certFormat = ReadUtf(br); + if ("X.509" != certFormat) + throw new IOException("Unsupported certificate format: " + certFormat); + } + + byte[] certData = ReadBufferWithLength(br); + try + { + System.Security.Cryptography.X509Certificates.X509Certificate2 cert = new System.Security.Cryptography.X509Certificates.X509Certificate2(certData); + return DotNetUtilities.FromX509Certificate(cert); + } + finally + { + Array.Clear(certData, 0, certData.Length); + } + } + + private static string ReadUtf(BinaryReader br) + { + short length = ReadInt16(br); + byte[] utfBytes = br.ReadBytes(length); + + /* + * FIXME JKS actually uses a "modified UTF-8" format. 
For the moment we will just support single-byte + * encodings that aren't null bytes. + */ + for (int i = 0; i < utfBytes.Length; ++i) + { + byte utfByte = utfBytes[i]; + if (utfByte == 0 || (utfByte & 0x80) != 0) + throw new NotSupportedException("Currently missing support for modified UTF-8 encoding in JKS"); + } + + return Encoding.UTF8.GetString(utfBytes); + } + + private static void WriteBufferWithLength(BinaryWriter bw, byte[] buffer) + { + WriteInt32(bw, buffer.Length); + bw.Write(buffer); + } + + private static void WriteDateTime(BinaryWriter bw, DateTime dateTime) + { + bw.Write(Longs.ReverseBytes(DateTimeUtilities.DateTimeToUnixMs(dateTime.ToUniversalTime()))); + } + + private static void WriteInt16(BinaryWriter bw, short n) + { + n = (short)(((n & 0xFF) << 8) | ((n >> 8) & 0xFF)); + bw.Write(n); + } + + private static void WriteInt32(BinaryWriter bw, int n) + { + bw.Write(Integers.ReverseBytes(n)); + } + + private static void WriteTypedCertificate(BinaryWriter bw, X509Certificate cert) + { + WriteUtf(bw, "X.509"); + WriteBufferWithLength(bw, cert.GetEncoded()); + } + + private static void WriteUtf(BinaryWriter bw, string s) + { + byte[] utfBytes = Encoding.UTF8.GetBytes(s); + + /* + * FIXME JKS actually uses a "modified UTF-8" format. For the moment we will just support single-byte + * encodings that aren't null bytes. + */ + for (int i = 0; i < utfBytes.Length; ++i) + { + byte utfByte = utfBytes[i]; + if (utfByte == 0 || (utfByte & 0x80) != 0) + throw new NotSupportedException("Currently missing support for modified UTF-8 encoding in JKS"); + } + + WriteInt16(bw, Convert.ToInt16(utfBytes.Length)); + bw.Write(utfBytes); + } + + /** + * JksTrustedCertEntry is a internal container for the certificate entry. 
+ */ + private sealed class JksTrustedCertEntry + { + internal readonly DateTime date; + internal readonly X509Certificate cert; + + internal JksTrustedCertEntry(DateTime date, X509Certificate cert) + { + this.date = date; + this.cert = cert; + } + } + + private sealed class JksKeyEntry + { + internal readonly DateTime date; + internal readonly EncryptedPrivateKeyInfo keyData; + internal readonly X509Certificate[] chain; + + internal JksKeyEntry(DateTime date, byte[] keyData, X509Certificate[] chain) + { + this.date = date; + this.keyData = EncryptedPrivateKeyInfo.GetInstance(Asn1Sequence.GetInstance(keyData)); + this.chain = chain; + } + } + + private sealed class ErasableByteStream + : MemoryStream + { + internal ErasableByteStream(byte[] buffer, int index, int count) + : base(buffer, index, count, true) + { + } + + internal void Erase() + { + Position = 0L; + Streams.WriteZeroes(this, Convert.ToInt32(Length)); + } + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/JKS/Management.cs b/RemoteFile/ImplementedStoreTypes/JKS/Management.cs new file mode 100644 index 00000000..8ae1fa4c --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/JKS/Management.cs @@ -0,0 +1,11 @@ + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.JKS +{ + public class Management : ManagementBase + { + internal override ICertificateStoreSerializer GetCertificateStoreSerializer() + { + return new JKSCertificateStoreSerializer(); + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/PEM/Inventory.cs b/RemoteFile/ImplementedStoreTypes/PEM/Inventory.cs new file mode 100644 index 00000000..1ee7c72a --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/PEM/Inventory.cs @@ -0,0 +1,11 @@ + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.PEM +{ + public class Inventory : InventoryBase + { + internal override ICertificateStoreSerializer GetCertificateStoreSerializer() + { + return new PEMCertificateStoreSerializer(); + } + } +} diff --git 
a/RemoteFile/ImplementedStoreTypes/PEM/Management.cs b/RemoteFile/ImplementedStoreTypes/PEM/Management.cs new file mode 100644 index 00000000..04164266 --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/PEM/Management.cs @@ -0,0 +1,11 @@ + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.PEM +{ + public class Management : ManagementBase + { + internal override ICertificateStoreSerializer GetCertificateStoreSerializer() + { + return new PEMCertificateStoreSerializer(); + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/PEM/PEMCertificateStoreSerializer.cs b/RemoteFile/ImplementedStoreTypes/PEM/PEMCertificateStoreSerializer.cs new file mode 100644 index 00000000..81b998b5 --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/PEM/PEMCertificateStoreSerializer.cs @@ -0,0 +1,226 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.IO; + +using Newtonsoft.Json; + +using Keyfactor.Logging; +using Keyfactor.PKI.PrivateKeys; +using Keyfactor.PKI.X509; +using Keyfactor.PKI.PEM; +using Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers; +using Keyfactor.Extensions.Orchestrator.RemoteFile.Models; + +using Microsoft.Extensions.Logging; + +using Org.BouncyCastle.Crypto; +using Org.BouncyCastle.Pkcs; +using Org.BouncyCastle.X509; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.PEM +{ + class PEMCertificateStoreSerializer : ICertificateStoreSerializer + { + string[] PrivateKeyDelimeters = new string[] { "-----BEGIN PRIVATE KEY-----", "-----BEGIN ENCRYPTED PRIVATE KEY-----", "-----BEGIN RSA PRIVATE KEY-----" }; + string CertDelimBeg = "-----BEGIN CERTIFICATE-----"; + string CertDelimEnd = "-----END CERTIFICATE-----"; + + private bool IsTrustStore { get; set; } + private bool IncludesChain { get; set; } + private string SeparatePrivateKeyFilePath { get; set; } + + private ILogger logger; + + public PEMCertificateStoreSerializer() + { + logger = LogHandler.GetClassLogger(this.GetType()); + } + + public Pkcs12Store 
DeserializeRemoteCertificateStore(byte[] storeContentBytes, string storePassword, string storeProperties, IRemoteHandler remoteHandler) + { + logger.MethodEntry(LogLevel.Debug); + + LoadCustomProperties(storeProperties); + + Pkcs12StoreBuilder storeBuilder = new Pkcs12StoreBuilder(); + Pkcs12Store store = storeBuilder.Build(); + + string storeContents = Encoding.ASCII.GetString(storeContentBytes); + X509CertificateEntry[] certificates = GetCertificates(storeContents); + + if (IsTrustStore) + { + foreach(X509CertificateEntry certificate in certificates) + { + store.SetCertificateEntry(CertificateConverterFactory.FromBouncyCastleCertificate(certificate.Certificate).ToX509Certificate2().Thumbprint, certificate); + } + } + else + { + AsymmetricKeyEntry keyEntry = GetPrivateKey(storeContents, storePassword ?? string.Empty, remoteHandler); + store.SetKeyEntry(CertificateConverterFactory.FromBouncyCastleCertificate(certificates[0].Certificate).ToX509Certificate2().Thumbprint, keyEntry, certificates); + } + + // Second Pkcs12Store necessary because of an obscure BC bug where creating a Pkcs12Store without .Load (code above using "Set" methods only) does not set all internal hashtables necessary to avoid an error later + // when processing store. + MemoryStream ms = new MemoryStream(); + store.Save(ms, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray(), new Org.BouncyCastle.Security.SecureRandom()); + ms.Position = 0; + + Pkcs12Store newStore = storeBuilder.Build(); + newStore.Load(ms, string.IsNullOrEmpty(storePassword) ? 
new char[0] : storePassword.ToCharArray()); + + logger.MethodExit(LogLevel.Debug); + return newStore; + } + + public List SerializeRemoteCertificateStore(Pkcs12Store certificateStore, string storePath, string storePassword, string storeProperties, IRemoteHandler remoteHandler) + { + logger.MethodEntry(LogLevel.Debug); + + LoadCustomProperties(storeProperties); + + string pemString = string.Empty; + string keyString = string.Empty; + List storeInfo = new List(); + + if (IsTrustStore) + { + foreach (string alias in certificateStore.Aliases) + { + if (certificateStore.IsKeyEntry(alias)) + throw new RemoteFileException("Cannot add a certificate with a private key to a PEM trust store."); + + CertificateConverter certConverter = CertificateConverterFactory.FromBouncyCastleCertificate(certificateStore.GetCertificate(alias).Certificate); + pemString += certConverter.ToPEM(true); + } + } + else + { + bool keyEntryProcessed = false; + foreach (string alias in certificateStore.Aliases) + { + if (keyEntryProcessed) + throw new RemoteFileException("Cannot add a new certificate to a PEM store that already contains a certificate/key entry."); + else + keyEntryProcessed = true; + + if (!certificateStore.IsKeyEntry(alias)) + throw new RemoteFileException("No private key found. Private key must be present to add entry to a non-Trust PEM certificate store."); + + AsymmetricKeyParameter privateKey = certificateStore.GetKey(alias).Key; + X509CertificateEntry[] certEntries = certificateStore.GetCertificateChain(alias); + AsymmetricKeyParameter publicKey = certEntries[0].Certificate.GetPublicKey(); + PrivateKeyConverter keyConverter = PrivateKeyConverterFactory.FromBCKeyPair(privateKey, publicKey, false); + + byte[] privateKeyBytes = string.IsNullOrEmpty(storePassword) ? keyConverter.ToPkcs8BlobUnencrypted() : keyConverter.ToPkcs8Blob(storePassword); + keyString = PemUtilities.DERToPEM(privateKeyBytes, string.IsNullOrEmpty(storePassword) ? 
PemUtilities.PemObjectType.PrivateKey : PemUtilities.PemObjectType.EncryptedPrivateKey); + + X509CertificateEntry[] chainEntries = certificateStore.GetCertificateChain(alias); + CertificateConverter certConverter = CertificateConverterFactory.FromBouncyCastleCertificate(chainEntries[0].Certificate); + + pemString = certConverter.ToPEM(true); + if (string.IsNullOrEmpty(SeparatePrivateKeyFilePath)) + pemString += keyString; + + if (IncludesChain) + { + for (int i = 1; i < chainEntries.Length; i++) + { + CertificateConverter chainConverter = CertificateConverterFactory.FromBouncyCastleCertificate(chainEntries[i].Certificate); + pemString += chainConverter.ToPEM(true); + } + } + } + } + + storeInfo.Add(new SerializedStoreInfo() { FilePath = storePath, Contents = Encoding.ASCII.GetBytes(pemString) }); + if (!string.IsNullOrEmpty(SeparatePrivateKeyFilePath)) + storeInfo.Add(new SerializedStoreInfo() { FilePath = SeparatePrivateKeyFilePath, Contents = Encoding.ASCII.GetBytes(keyString) }); + + logger.MethodExit(LogLevel.Debug); + + return storeInfo; + } + + private void LoadCustomProperties(string storeProperties) + { + logger.MethodEntry(LogLevel.Debug); + + dynamic properties = JsonConvert.DeserializeObject(storeProperties); + IsTrustStore = properties.IsTrustStore == null || string.IsNullOrEmpty(properties.IsTrustStore.Value) ? false : bool.Parse(properties.IsTrustStore.Value); + IncludesChain = properties.IncludesChain == null || string.IsNullOrEmpty(properties.IncludesChain.Value) ? false : bool.Parse(properties.IncludesChain.Value); + SeparatePrivateKeyFilePath = properties.SeparatePrivateKeyFilePath == null || string.IsNullOrEmpty(properties.SeparatePrivateKeyFilePath.Value) ? 
String.Empty : properties.SeparatePrivateKeyFilePath.Value; + + logger.MethodExit(LogLevel.Debug); + } + + private X509CertificateEntry[] GetCertificates(string certificates) + { + logger.MethodEntry(LogLevel.Debug); + + List certificateEntries = new List(); + + try + { + while (certificates.Contains(CertDelimBeg)) + { + int certStart = certificates.IndexOf(CertDelimBeg); + int certLength = certificates.IndexOf(CertDelimEnd) + CertDelimEnd.Length - certStart; + string certificate = certificates.Substring(certStart, certLength); + + CertificateConverter c2 = CertificateConverterFactory.FromPEM(Encoding.ASCII.GetBytes(certificate.Replace(CertDelimBeg, string.Empty).Replace(CertDelimEnd, string.Empty))); + X509Certificate bcCert = c2.ToBouncyCastleCertificate(); + certificateEntries.Add(new X509CertificateEntry(bcCert)); + + certificates = certificates.Substring(certStart + certLength - 1); + } + } + catch (Exception ex) + { + throw new RemoteFileException($"Error attempting to retrieve certificate chain.", ex); + } + + logger.MethodExit(LogLevel.Debug); + + return certificateEntries.ToArray(); + } + + private AsymmetricKeyEntry GetPrivateKey(string storeContents, string storePassword, IRemoteHandler remoteHandler) + { + logger.MethodEntry(LogLevel.Debug); + + if (!String.IsNullOrEmpty(SeparatePrivateKeyFilePath)) + { + storeContents = Encoding.ASCII.GetString(remoteHandler.DownloadCertificateFile(SeparatePrivateKeyFilePath)); + } + + string privateKey = string.Empty; + foreach (string begDelim in PrivateKeyDelimeters) + { + string endDelim = begDelim.Replace("BEGIN", "END"); + + int keyStart = storeContents.IndexOf(begDelim); + if (keyStart == -1) + continue; + int keyLength = storeContents.IndexOf(endDelim) + endDelim.Length - keyStart; + if (keyLength == -1) + throw new RemoteFileException("Invalid private key: No ending private key delimiter found."); + + privateKey = storeContents.Substring(keyStart, keyLength).Replace(begDelim, string.Empty).Replace(endDelim, 
string.Empty); + + break; + } + + if (string.IsNullOrEmpty(privateKey)) + throw new RemoteFileException("Invalid private key: No private key found."); + + PrivateKeyConverter c = PrivateKeyConverterFactory.FromPkcs8Blob(Convert.FromBase64String(privateKey), storePassword); + + logger.MethodExit(LogLevel.Debug); + + return new AsymmetricKeyEntry(c.ToBCPrivateKey()); + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/PKCS12/Inventory.cs b/RemoteFile/ImplementedStoreTypes/PKCS12/Inventory.cs new file mode 100644 index 00000000..20b7982e --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/PKCS12/Inventory.cs @@ -0,0 +1,11 @@ + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.PKCS12 +{ + public class Inventory : InventoryBase + { + internal override ICertificateStoreSerializer GetCertificateStoreSerializer() + { + return new PKCS12CertificateStoreSerializer(); + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/PKCS12/Management.cs b/RemoteFile/ImplementedStoreTypes/PKCS12/Management.cs new file mode 100644 index 00000000..9d416cb8 --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/PKCS12/Management.cs @@ -0,0 +1,11 @@ + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.PKCS12 +{ + public class Management : ManagementBase + { + internal override ICertificateStoreSerializer GetCertificateStoreSerializer() + { + return new PKCS12CertificateStoreSerializer(); + } + } +} diff --git a/RemoteFile/ImplementedStoreTypes/PKCS12/PKCS12CertificateStoreSerializer.cs b/RemoteFile/ImplementedStoreTypes/PKCS12/PKCS12CertificateStoreSerializer.cs new file mode 100644 index 00000000..44402508 --- /dev/null +++ b/RemoteFile/ImplementedStoreTypes/PKCS12/PKCS12CertificateStoreSerializer.cs @@ -0,0 +1,38 @@ +using System.IO; +using System.Collections.Generic; +using Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers; +using Keyfactor.Extensions.Orchestrator.RemoteFile.Models; + +using Org.BouncyCastle.Pkcs; + +namespace 
Keyfactor.Extensions.Orchestrator.RemoteFile.PKCS12 +{ + class PKCS12CertificateStoreSerializer : ICertificateStoreSerializer + { + public Pkcs12Store DeserializeRemoteCertificateStore(byte[] storeContents, string storePassword, string storeProperties, IRemoteHandler remoteHandler) + { + Pkcs12StoreBuilder storeBuilder = new Pkcs12StoreBuilder(); + Pkcs12Store store = storeBuilder.Build(); + + using (MemoryStream ms = new MemoryStream(storeContents)) + { + store.Load(ms, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray()); + } + + return store; + } + + public List SerializeRemoteCertificateStore(Pkcs12Store certificateStore, string storePath, string storePassword, string storeProperties, IRemoteHandler remoteHandler) + { + using (MemoryStream outStream = new MemoryStream()) + { + certificateStore.Save(outStream, string.IsNullOrEmpty(storePassword) ? new char[0] : storePassword.ToCharArray(), new Org.BouncyCastle.Security.SecureRandom()); + + List storeInfo = new List(); + storeInfo.Add(new SerializedStoreInfo() { FilePath = storePath, Contents = outStream.ToArray() }); + + return storeInfo; + } + } + } +} diff --git a/RemoteFile/InventoryBase.cs b/RemoteFile/InventoryBase.cs new file mode 100644 index 00000000..94f20a8a --- /dev/null +++ b/RemoteFile/InventoryBase.cs @@ -0,0 +1,96 @@ +using System; +using System.Collections.Generic; +using System.Security.Cryptography.X509Certificates; + +using Keyfactor.Orchestrators.Extensions; +using Keyfactor.Orchestrators.Common.Enums; +using Keyfactor.Logging; +using Keyfactor.Extensions.Orchestrator.RemoteFile.Models; + +using Microsoft.Extensions.Logging; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + public abstract class InventoryBase : RemoteFileJobTypeBase, IInventoryJobExtension + { + protected ILogger logger; + + public string ExtensionName => string.Empty; + + RemoteCertificateStore certificateStore = new RemoteCertificateStore(); + + public JobResult 
ProcessJob(InventoryJobConfiguration config, SubmitInventoryUpdate submitInventory) + { + ILogger logger = LogHandler.GetClassLogger(this.GetType()); + logger.LogDebug($"Begin {config.Capability} for job id {config.JobId}..."); + logger.LogDebug($"Server: { config.CertificateStoreDetails.ClientMachine }"); + logger.LogDebug($"Store Path: { config.CertificateStoreDetails.StorePath }"); + logger.LogDebug($"Job Properties:"); + foreach (KeyValuePair keyValue in config.JobProperties ?? new Dictionary()) + { + logger.LogDebug($" {keyValue.Key}: {keyValue.Value}"); + } + + ICertificateStoreSerializer certificateStoreSerializer = GetCertificateStoreSerializer(); + List inventoryItems = new List(); + + try + { + ApplicationSettings.Initialize(this.GetType().Assembly.Location); + certificateStore = new RemoteCertificateStore(config.CertificateStoreDetails.ClientMachine, config.ServerUsername, config.ServerPassword, config.CertificateStoreDetails.StorePath, config.CertificateStoreDetails.StorePassword, config.JobProperties); + certificateStore.LoadCertificateStore(certificateStoreSerializer, config.CertificateStoreDetails.Properties); + + List collections = certificateStore.GetCertificateChains(); + + logger.LogDebug($"Format returned certificates BEGIN"); + foreach (X509Certificate2Collection collection in collections) + { + if (collection.Count == 0) + continue; + + X509Certificate2Ext issuedCertificate = (X509Certificate2Ext)collection[0]; + + List certChain = new List(); + foreach (X509Certificate2 certificate in collection) + { + certChain.Add(Convert.ToBase64String(certificate.Export(X509ContentType.Cert))); + logger.LogDebug(Convert.ToBase64String(certificate.Export(X509ContentType.Cert))); + } + + inventoryItems.Add(new CurrentInventoryItem() + { + ItemStatus = OrchestratorInventoryItemStatus.Unknown, + Alias = string.IsNullOrEmpty(issuedCertificate.FriendlyNameExt) ? 
issuedCertificate.Thumbprint : issuedCertificate.FriendlyNameExt, + PrivateKeyEntry = issuedCertificate.HasPrivateKey, + UseChainLevel = collection.Count > 1, + Certificates = certChain.ToArray() + }); + } + logger.LogDebug($"Format returned certificates END"); + } + catch (Exception ex) + { + logger.LogError($"Exception for {config.Capability}: {RemoteFileException.FlattenExceptionMessages(ex, string.Empty)} for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Failure, JobHistoryId = config.JobHistoryId, FailureMessage = RemoteFileException.FlattenExceptionMessages(ex, $"Site {config.CertificateStoreDetails.StorePath} on server {config.CertificateStoreDetails.ClientMachine}:") }; + } + finally + { + if (certificateStore.RemoteHandler != null) + certificateStore.Terminate(); + } + + try + { + submitInventory.Invoke(inventoryItems); + logger.LogDebug($"...End {config.Capability} job for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Success, JobHistoryId = config.JobHistoryId }; + } + catch (Exception ex) + { + string errorMessage = RemoteFileException.FlattenExceptionMessages(ex, string.Empty); + logger.LogError($"Exception returning certificates for {config.Capability}: {errorMessage} for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Failure, JobHistoryId = config.JobHistoryId, FailureMessage = RemoteFileException.FlattenExceptionMessages(ex, $"Site {config.CertificateStoreDetails.StorePath} on server {config.CertificateStoreDetails.ClientMachine}:") }; + } + } + } +} diff --git a/RemoteFile/ManagementBase.cs b/RemoteFile/ManagementBase.cs new file mode 100644 index 00000000..15dbb341 --- /dev/null +++ b/RemoteFile/ManagementBase.cs @@ -0,0 +1,129 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +using System; +using System.Collections.Generic; +using System.Security.Cryptography.X509Certificates; +using System.Threading; + +using Keyfactor.Logging; +using Keyfactor.Orchestrators.Extensions; +using Keyfactor.Orchestrators.Common.Enums; + +using Microsoft.Extensions.Logging; + +using Newtonsoft.Json; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + public abstract class ManagementBase : RemoteFileJobTypeBase, IManagementJobExtension + { + static Mutex mutex = new Mutex(false, "ModifyStore"); + + public string ExtensionName => ""; + + internal RemoteCertificateStore certificateStore = new RemoteCertificateStore(); + + public JobResult ProcessJob(ManagementJobConfiguration config) + { + ILogger logger = LogHandler.GetClassLogger(this.GetType()); + logger.LogDebug($"Begin {config.Capability} for job id {config.JobId}..."); + logger.LogDebug($"Server: { config.CertificateStoreDetails.ClientMachine }"); + logger.LogDebug($"Store Path: { config.CertificateStoreDetails.StorePath }"); + logger.LogDebug($"Job Properties:"); + foreach (KeyValuePair keyValue in config.JobProperties == null ? 
new Dictionary() : config.JobProperties) + { + logger.LogDebug($" {keyValue.Key}: {keyValue.Value}"); + } + + ICertificateStoreSerializer certificateStoreSerializer = GetCertificateStoreSerializer(); + + try + { + mutex.WaitOne(); + + ApplicationSettings.Initialize(this.GetType().Assembly.Location); + certificateStore = new RemoteCertificateStore(config.CertificateStoreDetails.ClientMachine, config.ServerUsername, config.ServerPassword, config.CertificateStoreDetails.StorePath, config.CertificateStoreDetails.StorePassword, config.JobProperties); + + switch (config.OperationType) + { + case CertStoreOperationType.Add: + logger.LogDebug($"BEGIN create Operation for {config.CertificateStoreDetails.StorePath} on {config.CertificateStoreDetails.ClientMachine}."); + if (!certificateStore.DoesStoreExist()) + { + if (ApplicationSettings.CreateStoreIfMissing) + CreateStore(config); + else + throw new RemoteFileException($"Certificate store {config.CertificateStoreDetails.StorePath} does not exist on server {config.CertificateStoreDetails.ClientMachine}."); + } + certificateStore.LoadCertificateStore(certificateStoreSerializer, config.CertificateStoreDetails.Properties); + certificateStore.AddCertificate((config.JobCertificate.Alias ?? 
new X509Certificate2(Convert.FromBase64String(config.JobCertificate.Contents), config.JobCertificate.PrivateKeyPassword).Thumbprint), config.JobCertificate.Contents, config.Overwrite, config.JobCertificate.PrivateKeyPassword); + certificateStore.SaveCertificateStore(certificateStoreSerializer.SerializeRemoteCertificateStore(certificateStore.GetCertificateStore(), config.CertificateStoreDetails.StorePath, config.CertificateStoreDetails.StorePassword, config.CertificateStoreDetails.Properties, certificateStore.RemoteHandler)); + + logger.LogDebug($"END create Operation for {config.CertificateStoreDetails.StorePath} on {config.CertificateStoreDetails.ClientMachine}."); + break; + + case CertStoreOperationType.Remove: + logger.LogDebug($"BEGIN Delete Operation for {config.CertificateStoreDetails.StorePath} on {config.CertificateStoreDetails.ClientMachine}."); + if (!certificateStore.DoesStoreExist()) + { + throw new RemoteFileException($"Certificate store {config.CertificateStoreDetails.StorePath} does not exist on server {config.CertificateStoreDetails.ClientMachine}."); + } + else + { + certificateStore.LoadCertificateStore(certificateStoreSerializer, config.CertificateStoreDetails.Properties); + certificateStore.DeleteCertificateByAlias(config.JobCertificate.Alias); + certificateStore.SaveCertificateStore(certificateStoreSerializer.SerializeRemoteCertificateStore(certificateStore.GetCertificateStore(), config.CertificateStoreDetails.StorePath, config.CertificateStoreDetails.StorePassword, config.CertificateStoreDetails.Properties, certificateStore.RemoteHandler)); + } + logger.LogDebug($"END Delete Operation for {config.CertificateStoreDetails.StorePath} on {config.CertificateStoreDetails.ClientMachine}."); + break; + + case CertStoreOperationType.Create: + logger.LogDebug($"BEGIN create Operation for {config.CertificateStoreDetails.StorePath} on {config.CertificateStoreDetails.ClientMachine}."); + if (certificateStore.DoesStoreExist()) + { + throw new 
RemoteFileException($"Certificate store {config.CertificateStoreDetails.StorePath} already exists."); + } + else + { + CreateStore(config); + } + logger.LogDebug($"END create Operation for {config.CertificateStoreDetails.StorePath} on {config.CertificateStoreDetails.ClientMachine}."); + break; + + default: + return new JobResult() { Result = OrchestratorJobStatusJobResult.Failure, JobHistoryId = config.JobHistoryId, FailureMessage = $"Site {config.CertificateStoreDetails.StorePath} on server {config.CertificateStoreDetails.ClientMachine}: Unsupported operation: {config.OperationType.ToString()}" }; + } + } + catch (Exception ex) + { + logger.LogError($"Exception for {config.Capability}: {RemoteFileException.FlattenExceptionMessages(ex, string.Empty)} for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Failure, JobHistoryId = config.JobHistoryId, FailureMessage = RemoteFileException.FlattenExceptionMessages(ex, $"Site {config.CertificateStoreDetails.StorePath} on server {config.CertificateStoreDetails.ClientMachine}:") }; + } + finally + { + mutex.ReleaseMutex(); + + if (certificateStore.RemoteHandler != null) + certificateStore.Terminate(); + } + + logger.LogDebug($"...End {config.Capability} job for job id {config.JobId}"); + return new JobResult() { Result = OrchestratorJobStatusJobResult.Success, JobHistoryId = config.JobHistoryId }; + } + + private void CreateStore(ManagementJobConfiguration config) + { + dynamic properties = JsonConvert.DeserializeObject(config.CertificateStoreDetails.Properties.ToString()); + string linuxFilePermissions = properties.LinuxFilePermissionsOnStoreCreation == null || string.IsNullOrEmpty(properties.LinuxFilePermissionsOnStoreCreation.Value) ? 
+ ApplicationSettings.DefaultLinuxPermissionsOnStoreCreation : + properties.LinuxFilePermissionsOnStoreCreation.Value; + + certificateStore.CreateCertificateStore(config.CertificateStoreDetails.StorePath, linuxFilePermissions); + } + } +} diff --git a/RemoteFile/Models/SerializedStoreInfo.cs b/RemoteFile/Models/SerializedStoreInfo.cs new file mode 100644 index 00000000..6f8459c8 --- /dev/null +++ b/RemoteFile/Models/SerializedStoreInfo.cs @@ -0,0 +1,11 @@ +using System.Security.Cryptography.X509Certificates; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.Models +{ + class SerializedStoreInfo : X509Certificate2 + { + public string FilePath { get; set; } + + public byte[] Contents { get; set; } + } +} diff --git a/RemoteFile/Models/X509Certificate2Ext.cs b/RemoteFile/Models/X509Certificate2Ext.cs new file mode 100644 index 00000000..257ec39a --- /dev/null +++ b/RemoteFile/Models/X509Certificate2Ext.cs @@ -0,0 +1,13 @@ +using System.Security.Cryptography.X509Certificates; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.Models +{ + class X509Certificate2Ext : X509Certificate2 + { + public string FriendlyNameExt { get; set; } + + public new bool HasPrivateKey { get; set; } + + public X509Certificate2Ext(byte[] bytes): base(bytes) { } + } +} diff --git a/RemoteFile/RemoteCertificateStore.cs b/RemoteFile/RemoteCertificateStore.cs new file mode 100644 index 00000000..c84543fe --- /dev/null +++ b/RemoteFile/RemoteCertificateStore.cs @@ -0,0 +1,453 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. +// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions +// and limitations under the License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Security.Cryptography.X509Certificates; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading; + +using Microsoft.Extensions.Logging; + +using Org.BouncyCastle.Pkcs; +using Org.BouncyCastle.Security; + +using Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers; +using Keyfactor.Extensions.Orchestrator.RemoteFile.Models; +using Keyfactor.Logging; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + internal class RemoteCertificateStore + { + private const string NO_EXTENSION = "noext"; + private const string FULL_SCAN = "fullscan"; + + internal enum ServerTypeEnum + { + Linux, + Windows + } + + internal string Server { get; set; } + internal string ServerId { get; set; } + internal string ServerPassword { get; set; } + internal string StorePath { get; set; } + internal string StoreFileName { get; set; } + internal string StorePassword { get; set; } + internal IRemoteHandler RemoteHandler { get; set; } + internal ServerTypeEnum ServerType { get; set; } + internal List DiscoveredStores { get; set; } + internal string UploadFilePath { get; set; } + + private Pkcs12Store CertificateStore; + private ILogger logger; + + + internal RemoteCertificateStore() { } + + internal RemoteCertificateStore(string server, string serverId, string serverPassword, string storeFileAndPath, string storePassword, Dictionary jobProperties) + { + logger = LogHandler.GetClassLogger(this.GetType()); + logger.MethodEntry(LogLevel.Debug); + + Server = server; + + PathFile fullPath = SplitStorePathFile(storeFileAndPath); + StorePath = fullPath.Path; + StoreFileName = fullPath.File; + + ServerId = serverId; + ServerPassword = serverPassword ?? string.Empty; + StorePassword = storePassword; + ServerType = StorePath.Substring(0, 1) == "/" ? 
ServerTypeEnum.Linux : ServerTypeEnum.Windows; + UploadFilePath = !string.IsNullOrEmpty(ApplicationSettings.SeparateUploadFilePath) && ServerType == ServerTypeEnum.Linux ? ApplicationSettings.SeparateUploadFilePath : StorePath; + logger.LogDebug($"UploadFilePath: {UploadFilePath}"); + + if (!IsStorePathValid()) + { + logger.LogDebug("Store path not valid"); + string partialMessage = ServerType == ServerTypeEnum.Windows ? @"'\', ':', " : string.Empty; + throw new RemoteFileException($"PKCS12 store path {storeFileAndPath} is invalid. Only alphanumeric, '.', '/', {partialMessage}'-', and '_' characters are allowed in the store path."); + } + logger.LogDebug("Store path valid"); + + Initialize(); + + logger.MethodExit(LogLevel.Debug); + } + + internal RemoteCertificateStore(string server, string serverId, string serverPassword, ServerTypeEnum serverType) + { + logger = LogHandler.GetClassLogger(this.GetType()); + logger.MethodEntry(LogLevel.Debug); + + Server = server; + ServerId = serverId; + ServerPassword = serverPassword ?? 
string.Empty; + ServerType = serverType; + + Initialize(); + + logger.MethodExit(LogLevel.Debug); + } + + internal void LoadCertificateStore(ICertificateStoreSerializer certificateStoreSerializer, string storeProperties) + { + logger.MethodEntry(LogLevel.Debug); + + CertificateStore = new Pkcs12Store(); + + byte[] byteContents = RemoteHandler.DownloadCertificateFile(StorePath + StoreFileName); + if (byteContents.Length < 5) + return; + + CertificateStore = certificateStoreSerializer.DeserializeRemoteCertificateStore(byteContents, StorePassword, storeProperties, RemoteHandler); + + logger.MethodExit(LogLevel.Debug); + } + + internal Pkcs12Store GetCertificateStore() + { + logger.MethodEntry(LogLevel.Debug); + logger.MethodExit(LogLevel.Debug); + + return CertificateStore; + } + + internal void Terminate() + { + logger.MethodEntry(LogLevel.Debug); + + if (RemoteHandler != null) + RemoteHandler.Terminate(); + + logger.MethodExit(LogLevel.Debug); + } + + internal List FindStores(string[] paths, string[] extensions, string[] files, bool includeSymLinks) + { + logger.MethodEntry(LogLevel.Debug); + logger.MethodExit(LogLevel.Debug); + + if (DiscoveredStores != null) + return DiscoveredStores; + + return ServerType == ServerTypeEnum.Linux ? 
FindStoresLinux(paths, extensions, files, includeSymLinks) : FindStoresWindows(paths, extensions, files); + } + + internal List GetCertificateChains() + { + logger.MethodEntry(LogLevel.Debug); + + List certificateChains = new List(); + + foreach(string alias in CertificateStore.Aliases) + { + X509Certificate2Collection chain = new X509Certificate2Collection(); + X509CertificateEntry[] entries; + + if (CertificateStore.IsKeyEntry(alias)) + { + entries = CertificateStore.GetCertificateChain(alias); + } + else + { + X509CertificateEntry entry = CertificateStore.GetCertificate(alias); + entries = new X509CertificateEntry[] { entry }; + } + + foreach(X509CertificateEntry entry in entries) + { + X509Certificate2Ext cert = new X509Certificate2Ext(entry.Certificate.GetEncoded()); + cert.FriendlyNameExt = alias; + cert.HasPrivateKey = CertificateStore.IsKeyEntry(alias); + chain.Add(cert); + } + + certificateChains.Add(chain); + } + + logger.MethodExit(LogLevel.Debug); + + return certificateChains; + } + + internal void DeleteCertificateByAlias(string alias) + { + logger.MethodEntry(LogLevel.Debug); + + try + { + byte[] byteContents = RemoteHandler.DownloadCertificateFile(StorePath + StoreFileName); + + using (MemoryStream stream = new MemoryStream(byteContents)) + { + if (stream.Length == 0) + { + throw new RemoteFileException($"Alias {alias} does not exist in certificate store {StorePath + StoreFileName}."); + } + + if (!CertificateStore.ContainsAlias(alias)) + { + throw new RemoteFileException($"Alias {alias} does not exist in certificate store {StorePath + StoreFileName}."); + } + + CertificateStore.DeleteEntry(alias); + + using (MemoryStream outStream = new MemoryStream()) + { + CertificateStore.Save(outStream, string.IsNullOrEmpty(StorePassword) ? 
new char[0] : StorePassword.ToCharArray(), new Org.BouncyCastle.Security.SecureRandom()); + } + } + } + catch (Exception ex) + { + throw new RemoteFileException($"Error attempting to remove certficate for store path={StorePath}, file name={StoreFileName}.", ex); + } + + logger.MethodExit(LogLevel.Debug); + } + + internal void CreateCertificateStore(string storePath, string linuxFilePermissions) + { + logger.MethodEntry(LogLevel.Debug); + + RemoteHandler.CreateEmptyStoreFile(storePath, linuxFilePermissions); + + logger.MethodExit(LogLevel.Debug); + } + + internal void AddCertificate(string alias, string certificateEntry, bool overwrite, string pfxPassword) + { + logger.MethodEntry(LogLevel.Debug); + + try + { + Pkcs12StoreBuilder storeBuilder = new Pkcs12StoreBuilder(); + Pkcs12Store certs = storeBuilder.Build(); + + byte[] newCertBytes = Convert.FromBase64String(certificateEntry); + + Pkcs12Store newEntry = storeBuilder.Build(); + + X509Certificate2 cert = new X509Certificate2(newCertBytes, pfxPassword, X509KeyStorageFlags.Exportable); + byte[] binaryCert = cert.Export(X509ContentType.Pkcs12, pfxPassword); + + using (MemoryStream ms = new MemoryStream(string.IsNullOrEmpty(pfxPassword) ? binaryCert : newCertBytes)) + { + newEntry.Load(ms, string.IsNullOrEmpty(pfxPassword) ? new char[0] : pfxPassword.ToCharArray()); + } + + if (CertificateStore.ContainsAlias(alias) && !overwrite) + { + throw new RemoteFileException($"Alias {alias} already exists in store {StorePath + StoreFileName} and overwrite is set to False. 
Please try again with overwrite set to True if you wish to replace this entry."); + } + + string checkAliasExists = string.Empty; + foreach (string newEntryAlias in newEntry.Aliases) + { + if (!newEntry.IsKeyEntry(newEntryAlias)) + continue; + + checkAliasExists = newEntryAlias; + + if (CertificateStore.ContainsAlias(alias)) + { + CertificateStore.DeleteEntry(alias); + } + CertificateStore.SetKeyEntry(alias, newEntry.GetKey(newEntryAlias), newEntry.GetCertificateChain(newEntryAlias)); + } + + if (string.IsNullOrEmpty(checkAliasExists)) + { + Org.BouncyCastle.X509.X509Certificate bcCert = DotNetUtilities.FromX509Certificate(cert); + X509CertificateEntry bcEntry = new X509CertificateEntry(bcCert); + if (CertificateStore.ContainsAlias(alias)) + { + CertificateStore.DeleteEntry(alias); + } + CertificateStore.SetCertificateEntry(alias, bcEntry); + } + + using (MemoryStream outStream = new MemoryStream()) + { + CertificateStore.Save(outStream, string.IsNullOrEmpty(StorePassword) ? new char[0] : StorePassword.ToCharArray(), new Org.BouncyCastle.Security.SecureRandom()); + } + } + catch (Exception ex) + { + throw new RemoteFileException($"Error attempting to add certficate for store path={StorePath}, file name={StoreFileName}.", ex); + } + + logger.MethodExit(LogLevel.Debug); + } + + internal void SaveCertificateStore(List storeInfo) + { + logger.MethodEntry(LogLevel.Debug); + + foreach(SerializedStoreInfo fileInfo in storeInfo) + { + PathFile pathFile = SplitStorePathFile(fileInfo.FilePath); + RemoteHandler.UploadCertificateFile(pathFile.Path, pathFile.File, fileInfo.Contents); + } + + logger.MethodExit(LogLevel.Debug); + } + + internal bool DoesStoreExist() + { + logger.MethodEntry(LogLevel.Debug); + logger.MethodExit(LogLevel.Debug); + + return RemoteHandler.DoesFileExist(StorePath + StoreFileName); + } + + private void Initialize() + { + logger.MethodEntry(LogLevel.Debug); + + if (ServerType == ServerTypeEnum.Linux) + RemoteHandler = new SSHHandler(Server, ServerId, 
ServerPassword); + else + RemoteHandler = new WinRMHandler(Server, ServerId, ServerPassword); + + RemoteHandler.Initialize(); + + logger.MethodExit(LogLevel.Debug); + } + + private bool IsStorePathValid() + { + logger.MethodEntry(LogLevel.Debug); + + Regex regex = new Regex(ServerType == ServerTypeEnum.Linux ? $@"^[\d\s\w-_/.]*$" : $@"^[\d\s\w-_/.:\\\\]*$"); + + logger.MethodExit(LogLevel.Debug); + + return regex.IsMatch(StorePath + StoreFileName); + } + + private List FindStoresLinux(string[] paths, string[] extensions, string[] fileNames, bool includeSymLinks) + { + logger.MethodEntry(LogLevel.Debug); + + try + { + string concatPaths = string.Join(" ", paths); + string command = $"find {concatPaths} "; + if (!includeSymLinks) + command += " -type f "; + + foreach (string extension in extensions) + { + foreach (string fileName in fileNames) + { + command += (command.IndexOf("-iname") == -1 ? string.Empty : "-or "); + command += $"-iname '{fileName.Trim()}"; + if (extension.ToLower() == NO_EXTENSION) + command += $"' ! 
-iname '*.*' "; + else + command += $".{extension.Trim()}' "; + } + } + + string result = string.Empty; + //if (extensions.Any(p => p.ToLower() != NO_EXTENSION)) + result = RemoteHandler.RunCommand(command, null, ApplicationSettings.UseSudo, null); + + logger.MethodExit(LogLevel.Debug); + + return (result.Split(new char[] { '\n' }, StringSplitOptions.RemoveEmptyEntries)).ToList(); + } + catch (Exception ex) + { + throw new RemoteFileException($"Error attempting to find certificate stores for path={string.Join(" ", paths)}.", ex); + } + } + + private List FindStoresWindows(string[] paths, string[] extensions, string[] fileNames) + { + logger.MethodEntry(LogLevel.Debug); + + List results = new List(); + StringBuilder concatFileNames = new StringBuilder(); + + if (paths[0] == FULL_SCAN) + { + paths = GetAvailableDrives(); + for (int i = 0; i < paths.Length; i++) + paths[i] += "/"; + } + + foreach (string path in paths) + { + foreach (string extension in extensions) + { + foreach (string fileName in fileNames) + concatFileNames.Append($",{fileName}.{extension}"); + } + + string command = $"(Get-ChildItem -Path {FormatPath(path)} -Recurse -ErrorAction SilentlyContinue -Include {concatFileNames.ToString().Substring(1)}).fullname"; + string result = RemoteHandler.RunCommand(command, null, false, null); + results.AddRange(result.Split(new string[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries).ToList()); + } + + logger.MethodExit(LogLevel.Debug); + + return results; + } + + private string[] GetAvailableDrives() + { + logger.MethodEntry(LogLevel.Debug); + + string command = @"Get-WmiObject Win32_Logicaldisk -Filter ""DriveType = '3'"" | % {$_.DeviceId}"; + string result = RemoteHandler.RunCommand(command, null, false, null); + + logger.MethodExit(LogLevel.Debug); + + return result.Split(new string[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries); + } + + private PathFile SplitStorePathFile(string pathFileName) + { + logger.MethodEntry(LogLevel.Debug); + + try + { + 
string workingPathFileName = pathFileName.Replace(@"\", @"/"); + int separatorIndex = workingPathFileName.LastIndexOf(@"/"); + + logger.MethodExit(LogLevel.Debug); + return new PathFile() { Path = pathFileName.Substring(0, separatorIndex + 1), File = pathFileName.Substring(separatorIndex + 1) }; + } + catch (Exception ex) + { + throw new RemoteFileException($"Error attempting to parse certficate store path={StorePath}, file name={StoreFileName}.", ex); + } + } + + private string FormatPath(string path) + { + logger.MethodEntry(LogLevel.Debug); + logger.MethodExit(LogLevel.Debug); + + return path + (path.Substring(path.Length - 1) == @"\" ? string.Empty : @"\"); + } + } + + class PathFile + { + public string Path { get; set; } + public string File { get; set; } + } +} \ No newline at end of file diff --git a/RemoteFile/RemoteFile.csproj b/RemoteFile/RemoteFile.csproj new file mode 100644 index 00000000..18a28bb2 --- /dev/null +++ b/RemoteFile/RemoteFile.csproj @@ -0,0 +1,29 @@ + + + + false + netcoreapp3.1 + true + + + + + + + + + + + + + + + + + + + + + + + diff --git a/RemoteFile/RemoteFileJobTypeBase.cs b/RemoteFile/RemoteFileJobTypeBase.cs new file mode 100644 index 00000000..6fdfb685 --- /dev/null +++ b/RemoteFile/RemoteFileJobTypeBase.cs @@ -0,0 +1,11 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile +{ + public abstract class RemoteFileJobTypeBase + { + internal abstract ICertificateStoreSerializer GetCertificateStoreSerializer(); + } +} diff --git a/RemoteFile/RemoteHandlers/BaseRemoteHandler.cs b/RemoteFile/RemoteHandlers/BaseRemoteHandler.cs new file mode 100644 index 00000000..e343aa0d --- /dev/null +++ b/RemoteFile/RemoteHandlers/BaseRemoteHandler.cs @@ -0,0 +1,42 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +using Keyfactor.Logging; + +using Microsoft.Extensions.Logging; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers +{ + abstract class BaseRemoteHandler : IRemoteHandler + { + internal ILogger _logger; + internal const string KEYTOOL_ERROR = "password was incorrect"; + internal const string PASSWORD_MASK_VALUE = "[PASSWORD]"; + internal const int PASSWORD_LENGTH_MAX = 100; + + public string Server { get; set; } + + public BaseRemoteHandler() + { + _logger = LogHandler.GetClassLogger(this.GetType()); + } + + public abstract void Initialize(); + + public abstract void Terminate(); + + public abstract string RunCommand(string commandText, object[] arguments, bool withSudo, string[] passwordsToMaskInLog); + + public abstract void UploadCertificateFile(string path, string fileName, byte[] certBytes); + + public abstract byte[] DownloadCertificateFile(string path); + + public abstract void CreateEmptyStoreFile(string path, string linuxFilePermissions); + + public abstract bool DoesFileExist(string path); + } +} diff --git a/RemoteFile/RemoteHandlers/IRemoteHandler.cs b/RemoteFile/RemoteHandlers/IRemoteHandler.cs new file mode 100644 index 00000000..fa9c797d --- /dev/null +++ b/RemoteFile/RemoteHandlers/IRemoteHandler.cs @@ -0,0 +1,30 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers +{ + /// + /// Defines the interface that must be implemented by the method used to send data across the wire (i.e. SSH or WinRM via PS) + /// Currently with the dependency on the SSH class, need to look into refactoring to the inerface to allow SSH or WimRM + /// + interface IRemoteHandler + { + void Initialize(); + + void Terminate(); + + string RunCommand(string commandText, object[] arguments, bool withSudo, string[] passwordsToMaskInLog); + + void UploadCertificateFile(string path, string fileName, byte[] certBytes); + + byte[] DownloadCertificateFile(string path); + + void CreateEmptyStoreFile(string path, string linuxFilePermissions); + + bool DoesFileExist(string path); + } +} diff --git a/RemoteFile/RemoteHandlers/SSHHandler.cs b/RemoteFile/RemoteHandlers/SSHHandler.cs new file mode 100644 index 00000000..e6ca3ced --- /dev/null +++ b/RemoteFile/RemoteHandlers/SSHHandler.cs @@ -0,0 +1,391 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. +// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Security.Cryptography; +using System.Text.RegularExpressions; +using System.Text; + +using Renci.SshNet; + +using Microsoft.Extensions.Logging; + +using Keyfactor.Logging; +using Keyfactor.PKI.PrivateKeys; +using Keyfactor.PKI.PEM; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers +{ + class SSHHandler : BaseRemoteHandler + { + private const string LINUX_PERMISSION_REGEXP = "^[0-7]{3}$"; + private ConnectionInfo Connection { get; set; } + + private SshClient sshClient; + + internal SSHHandler(string server, string serverLogin, string serverPassword) + { + _logger.MethodEntry(LogLevel.Debug); + + Server = server; + + List authenticationMethods = new List(); + if (serverPassword.Length < PASSWORD_LENGTH_MAX) + { + authenticationMethods.Add(new PasswordAuthenticationMethod(serverLogin, serverPassword)); + } + else + { + try + { + using (MemoryStream ms = new MemoryStream(Encoding.ASCII.GetBytes(FormatRSAPrivateKey(serverPassword)))) + { + authenticationMethods.Add(new PrivateKeyAuthenticationMethod(serverLogin, new PrivateKeyFile[] { new PrivateKeyFile(ms) })); + } + } + catch (Exception) + { + using (MemoryStream ms = new MemoryStream(Encoding.ASCII.GetBytes(ConvertToPKCS1(serverPassword)))) + { + authenticationMethods.Add(new PrivateKeyAuthenticationMethod(serverLogin, new PrivateKeyFile[] { new PrivateKeyFile(ms) })); + } + } + + } + + Connection = new ConnectionInfo(server, serverLogin, authenticationMethods.ToArray()); + + _logger.MethodExit(LogLevel.Debug); + } + + public override void Initialize() + { + _logger.MethodEntry(LogLevel.Debug); + + sshClient = new SshClient(Connection); + sshClient.Connect(); + + _logger.MethodExit(LogLevel.Debug); + } + + public override void Terminate() + { + _logger.MethodEntry(LogLevel.Debug); + + sshClient.Disconnect(); + sshClient.Dispose(); + + _logger.MethodExit(LogLevel.Debug); + } + + public override string 
RunCommand(string commandText, object[] arguments, bool withSudo, string[] passwordsToMaskInLog) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"RunCommand: {commandText}"); + + string sudo = $"sudo -i -S "; + string echo = $"echo -e '\n' | "; + + try + { + if (withSudo) + commandText = sudo + commandText; + + commandText = echo + commandText; + + string displayCommand = commandText; + if (passwordsToMaskInLog != null) + { + foreach (string password in passwordsToMaskInLog) + displayCommand = displayCommand.Replace(password, PASSWORD_MASK_VALUE); + } + + using (SshCommand command = sshClient.CreateCommand($"{commandText}")) + { + _logger.LogDebug($"RunCommand: {displayCommand}"); + command.Execute(); + _logger.LogDebug($"SSH Results: {displayCommand}::: {command.Result}::: {command.Error}"); + + if (command.Result.ToLower().Contains(KEYTOOL_ERROR)) + throw new ApplicationException(command.Result); + + _logger.MethodExit(LogLevel.Debug); + + return command.Result; + } + } + catch (Exception ex) + { + _logger.LogError($"Exception during RunCommand...{RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + throw ex; + } + } + + public override void UploadCertificateFile(string path, string fileName, byte[] certBytes) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"UploadCertificateFile: {path}{fileName}"); + + string uploadPath = path+fileName; + + if (!string.IsNullOrEmpty(ApplicationSettings.SeparateUploadFilePath)) + { + uploadPath = ApplicationSettings.SeparateUploadFilePath + fileName; + } + + bool scpError = false; + + if (ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.Both || ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.SCP) + { + using (ScpClient client = new ScpClient(Connection)) + { + try + { + _logger.LogDebug($"SCP connection attempt to {Connection.Host} using login {Connection.Username} and connection method 
{Connection.AuthenticationMethods[0].Name}"); + client.Connect(); + + using (MemoryStream stream = new MemoryStream(certBytes)) + { + client.Upload(stream, FormatFTPPath(uploadPath)); + } + } + catch (Exception ex) + { + scpError = true; + _logger.LogError("Exception during SCP upload..."); + _logger.LogError($"Upload Exception: {RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + if (ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.Both) + _logger.LogDebug($"SCP upload failed. Attempting with SFTP protocol..."); + else + throw ex; + } + finally + { + client.Disconnect(); + } + } + } + + if ((ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.Both && scpError) || ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.SFTP) + { + using (SftpClient client = new SftpClient(Connection)) + { + try + { + _logger.LogDebug($"SFTP connection attempt to {Connection.Host} using login {Connection.Username} and connection method {Connection.AuthenticationMethods[0].Name}"); + client.Connect(); + + using (MemoryStream stream = new MemoryStream(certBytes)) + { + client.UploadFile(stream, FormatFTPPath(uploadPath)); + } + } + catch (Exception ex) + { + _logger.LogError("Exception during SFTP upload..."); + _logger.LogError($"Upload Exception: {RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + throw ex; + } + finally + { + client.Disconnect(); + } + } + } + + if (!string.IsNullOrEmpty(ApplicationSettings.SeparateUploadFilePath)) + { + RunCommand($"cp -a {uploadPath} {path}", null, ApplicationSettings.UseSudo, null); + RunCommand($"rm {uploadPath}", null, ApplicationSettings.UseSudo, null); + } + + _logger.MethodExit(LogLevel.Debug); + } + + public override byte[] DownloadCertificateFile(string path) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"DownloadCertificateFile: {path}"); + + byte[] rtnStore = new byte[] { }; + + 
string downloadPath = path; + string altPathOnly = string.Empty; + string altFileNameOnly = string.Empty; + + if (!string.IsNullOrEmpty(ApplicationSettings.SeparateUploadFilePath)) + { + SplitStorePathFile(path, out altPathOnly, out altFileNameOnly); + downloadPath = ApplicationSettings.SeparateUploadFilePath + altFileNameOnly; + RunCommand($"cp {path} {downloadPath}", null, ApplicationSettings.UseSudo, null); + RunCommand($"sudo chown {Connection.Username} {path}", null, ApplicationSettings.UseSudo, null); + } + + bool scpError = false; + + if (ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.Both || ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.SCP) + { + using (ScpClient client = new ScpClient(Connection)) + { + try + { + _logger.LogDebug($"SCP connection attempt to {Connection.Host} using login {Connection.Username} and connection method {Connection.AuthenticationMethods[0].Name}"); + client.Connect(); + + using (MemoryStream stream = new MemoryStream()) + { + client.Download(FormatFTPPath(downloadPath), stream); + rtnStore = stream.ToArray(); + } + } + catch (Exception ex) + { + scpError = true; + _logger.LogError("Exception during SCP download..."); + _logger.LogError($"Upload Exception: {RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + if (ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.Both) + _logger.LogDebug($"SCP download failed. 
Attempting with SFTP protocol..."); + else + throw ex; + } + finally + { + client.Disconnect(); + } + } + } + + if ((ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.Both && scpError) || ApplicationSettings.FileTransferProtocol == ApplicationSettings.FileTransferProtocolEnum.SFTP) + { + using (SftpClient client = new SftpClient(Connection)) + { + try + { + _logger.LogDebug($"SFTP connection attempt to {Connection.Host} using login {Connection.Username} and connection method {Connection.AuthenticationMethods[0].Name}"); + client.Connect(); + + using (MemoryStream stream = new MemoryStream()) + { + client.DownloadFile(FormatFTPPath(downloadPath), stream); + rtnStore = stream.ToArray(); + } + } + catch (Exception ex) + { + _logger.LogError("Exception during SFTP download..."); + _logger.LogError($"Download Exception: {RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + throw ex; + } + finally + { + client.Disconnect(); + } + } + } + + if (!string.IsNullOrEmpty(ApplicationSettings.SeparateUploadFilePath)) + { + RunCommand($"rm {downloadPath}", null, ApplicationSettings.UseSudo, null); + } + + _logger.MethodExit(LogLevel.Debug); + + return rtnStore; + } + + public override void CreateEmptyStoreFile(string path, string linuxFilePermissions) + { + _logger.MethodEntry(LogLevel.Debug); + + AreLinuxPermissionsValid(linuxFilePermissions); + RunCommand($"install -m {linuxFilePermissions} /dev/null {path}", null, false, null); + + _logger.MethodExit(LogLevel.Debug); + } + + public override bool DoesFileExist(string path) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"DoesFileExist: {path}"); + + using (SftpClient client = new SftpClient(Connection)) + { + try + { + client.Connect(); + string existsPath = FormatFTPPath(path); + bool exists = client.Exists(existsPath); + + _logger.MethodExit(LogLevel.Debug); + + return exists; + } + finally + { + client.Disconnect(); + } + } + } + + public static void 
AreLinuxPermissionsValid(string permissions) + { + Regex regex = new Regex(LINUX_PERMISSION_REGEXP); + if (!regex.IsMatch(permissions)) + throw new RemoteFileException($"Invalid format for Linux file permissions. This value must be exactly 3 digits long with each digit between 0-7 but found {permissions} instead."); + } + + private void SplitStorePathFile(string pathFileName, out string path, out string fileName) + { + _logger.MethodEntry(LogLevel.Debug); + + try + { + int separatorIndex = pathFileName.LastIndexOf(pathFileName.Substring(0, 1) == "/" ? @"/" : @"\"); + fileName = pathFileName.Substring(separatorIndex + 1); + path = pathFileName.Substring(0, separatorIndex + 1); + } + catch (Exception ex) + { + throw new RemoteFileException($"Error attempting to parse certficate store/key path={pathFileName}.", ex); + } + + _logger.MethodEntry(LogLevel.Debug); + } + + private string FormatRSAPrivateKey(string privateKey) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.MethodExit(LogLevel.Debug); + + return privateKey.Replace(" RSA PRIVATE ", "^^^").Replace(" ", System.Environment.NewLine).Replace("^^^", " RSA PRIVATE "); + } + + private string ConvertToPKCS1(string privateKey) + { + _logger.MethodEntry(LogLevel.Debug); + + privateKey = privateKey.Replace(System.Environment.NewLine, string.Empty).Replace("-----BEGIN PRIVATE KEY-----", string.Empty).Replace("-----END PRIVATE KEY-----", string.Empty); + PrivateKeyConverter conv = PrivateKeyConverterFactory.FromPkcs8Blob(Convert.FromBase64String(privateKey), string.Empty); + RSA alg = (RSA)conv.ToNetPrivateKey(); + string pemString = PemUtilities.DERToPEM(alg.ExportRSAPrivateKey(), PemUtilities.PemObjectType.PrivateKey); + + _logger.MethodExit(LogLevel.Debug); + + return pemString.Replace("PRIVATE", "RSA PRIVATE"); + } + + private string FormatFTPPath(string path) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.MethodExit(LogLevel.Debug); + + return path.Substring(0, 1) == @"/" ? 
path : @"/" + path.Replace("\\", "/"); + } + } +} diff --git a/RemoteFile/RemoteHandlers/WinRMHandler.cs b/RemoteFile/RemoteHandlers/WinRMHandler.cs new file mode 100644 index 00000000..3a9a6eb0 --- /dev/null +++ b/RemoteFile/RemoteHandlers/WinRMHandler.cs @@ -0,0 +1,269 @@ +// Copyright 2021 Keyfactor +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. +// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +using System; +using System.Collections.Generic; +using System.Management.Automation; +using System.Management.Automation.Runspaces; +using System.Net; +using System.Text; + +using Microsoft.Extensions.Logging; + +using Keyfactor.Logging; + +namespace Keyfactor.Extensions.Orchestrator.RemoteFile.RemoteHandlers +{ + class WinRMHandler : BaseRemoteHandler + { + private const string IGNORED_ERROR1 = "importing keystore"; + private const string IGNORED_ERROR2 = "warning:"; + private const string IGNORED_ERROR3 = "certificate was added to keystore"; + + private Runspace runspace { get; set; } + private WSManConnectionInfo connectionInfo { get; set; } + + internal WinRMHandler(string server, string serverLogin, string serverPassword) + { + _logger.MethodEntry(LogLevel.Debug); + + Server = server; + connectionInfo = new WSManConnectionInfo(new System.Uri($"{Server}/wsman")); + if (!string.IsNullOrEmpty(serverLogin)) + { + connectionInfo.Credential = new PSCredential(serverLogin, new NetworkCredential(serverLogin, serverPassword).SecurePassword); + } + + _logger.MethodExit(LogLevel.Debug); + } + + public override void Initialize() + { + 
_logger.MethodEntry(LogLevel.Debug); + + try + { + if (ApplicationSettings.UseNegotiate) + { + connectionInfo.AuthenticationMechanism = AuthenticationMechanism.Negotiate; + } + runspace = RunspaceFactory.CreateRunspace(connectionInfo); + runspace.Open(); + } + + catch (Exception ex) + { + _logger.LogError($"Exception during Initialize...{RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + throw ex; + } + + _logger.MethodExit(LogLevel.Debug); + } + + public override void Terminate() + { + _logger.MethodEntry(LogLevel.Debug); + + runspace.Close(); + runspace.Dispose(); + + _logger.MethodExit(LogLevel.Debug); + } + + public override string RunCommand(string commandText, object[] parameters, bool withSudo, string[] passwordsToMaskInLog) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"RunCommand: {commandText}"); + + try + { + using (PowerShell ps = PowerShell.Create()) + { + ps.Runspace = runspace; + + if (commandText.ToLower().IndexOf("keytool ") > -1) + { + commandText = ($"& '{commandText}").Replace("keytool", "keytool'"); + commandText = "echo '' | " + commandText; + } + ps.AddScript(commandText); + + string displayCommand = commandText; + if (passwordsToMaskInLog != null) + { + foreach (string password in passwordsToMaskInLog) + displayCommand = displayCommand.Replace(password, PASSWORD_MASK_VALUE); + } + + if (parameters != null) + { + foreach(object parameter in parameters) + ps.AddArgument(parameter); + } + + _logger.LogDebug($"RunCommand: {displayCommand}"); + string result = FormatResult(ps.Invoke(parameters)); + + if (ps.HadErrors) + { + string errors = string.Empty; + System.Collections.ObjectModel.Collection errorRecords = ps.Streams.Error.ReadAll(); + foreach (ErrorRecord errorRecord in errorRecords) + { + string error = errorRecord.ToString(); + if (error.ToLower().StartsWith(IGNORED_ERROR1) || + error.ToLower().Contains(IGNORED_ERROR2) || + error.ToLower().Contains(IGNORED_ERROR3)) + { + errors = null; + break; + } + + 
errors += (error + " "); + } + + if (!string.IsNullOrEmpty(errors)) + throw new ApplicationException(errors); + } + else + _logger.LogDebug($"WinRM Results: {displayCommand}::: {result}"); + + if (result.ToLower().Contains(KEYTOOL_ERROR)) + throw new ApplicationException(result); + + _logger.MethodExit(LogLevel.Debug); + + return result; + } + } + catch (Exception ex) + { + _logger.LogError($"Exception during RunCommand...{RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + throw ex; + } + } + + private byte[] RunCommandBinary(string commandText) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"RunCommandBinary: {Server}"); + + byte[] rtnBytes = new byte[0]; + + try + { + using (Runspace runspace = RunspaceFactory.CreateRunspace(new WSManConnectionInfo(new System.Uri($"{Server}/wsman")))) + { + runspace.Open(); + using (PowerShell ps = PowerShell.Create()) + { + ps.Runspace = runspace; + ps.AddScript(commandText); + + _logger.LogDebug($"RunCommandBinary: {commandText}"); + System.Collections.ObjectModel.Collection psResult = ps.Invoke(); + + if (ps.HadErrors) + { + string errors = string.Empty; + System.Collections.ObjectModel.Collection errorRecords = ps.Streams.Error.ReadAll(); + foreach (ErrorRecord errorRecord in errorRecords) + errors += (errorRecord.ToString() + " "); + + throw new ApplicationException(errors); + } + else + { + if (psResult.Count > 0) + rtnBytes = (byte[])psResult[0].BaseObject; + _logger.LogDebug($"WinRM Results: {commandText}::: binary results."); + } + } + } + + _logger.MethodExit(LogLevel.Debug); + + return rtnBytes; + } + + catch (Exception ex) + { + _logger.LogError("Exception during RunCommandBinary...{RemoteFileException.FlattenExceptionMessages(ex, ex.Message)}"); + throw ex; + } + } + + public override void UploadCertificateFile(string path, string fileName, byte[] certBytes) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"UploadCertificateFile: {path} {fileName}"); + + string 
scriptBlock = $@" + param($contents) + + Set-Content {path + fileName} -Encoding Byte -Value $contents + "; + + object[] arguments = new object[] { certBytes }; + + RunCommand(scriptBlock, arguments, false, null); + + _logger.MethodEntry(LogLevel.Debug); + } + + public override byte[] DownloadCertificateFile(string path) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"DownloadCertificateFile: {path}"); + _logger.MethodExit(LogLevel.Debug); + + return RunCommandBinary($@"Get-Content -Path ""{path}"" -Encoding Byte -Raw"); + } + + public override void CreateEmptyStoreFile(string path, string linuxFilePermissions) + { + _logger.MethodEntry(LogLevel.Debug); + RunCommand($@"Out-File -FilePath ""{path}""", null, false, null); + _logger.MethodExit(LogLevel.Debug); + } + + public override bool DoesFileExist(string path) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.LogDebug($"DoesFileExist: {path}"); + _logger.MethodExit(LogLevel.Debug); + + return Convert.ToBoolean(RunCommand($@"Test-Path -path ""{path}""", null, false, null)); + } + + + private string FormatResult(ICollection results) + { + _logger.MethodEntry(LogLevel.Debug); + + StringBuilder rtn = new StringBuilder(); + + foreach (PSObject resultLine in results) + { + if (resultLine != null) + rtn.Append(resultLine.ToString() + System.Environment.NewLine); + } + + _logger.MethodExit(LogLevel.Debug); + + return rtn.ToString(); + } + + private string FormatFTPPath(string path) + { + _logger.MethodEntry(LogLevel.Debug); + _logger.MethodExit(LogLevel.Debug); + + return path.Substring(0, 1) == @"/" ? 
path : @"/" + path.Replace("\\", "/"); + } + } +} diff --git a/RemoteFile/config.json b/RemoteFile/config.json new file mode 100644 index 00000000..10eb71fb --- /dev/null +++ b/RemoteFile/config.json @@ -0,0 +1,8 @@ +{ + "UseSudo": "N", + "CreateStoreIfMissing": "N", + "UseNegotiate": "N", + "SeparateUploadFilePath": "", + "FileTransferProtocol": "SCP", + "DefaultLinuxPermissionsOnStoreCreation": "600" +} \ No newline at end of file diff --git a/RemoteFile/manifest.json b/RemoteFile/manifest.json new file mode 100644 index 00000000..759fe13f --- /dev/null +++ b/RemoteFile/manifest.json @@ -0,0 +1,42 @@ +{ + "extensions": { + "Keyfactor.Orchestrators.Extensions.IOrchestratorJobExtension": { + "CertStores.RFPkcs12.Inventory": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.PKCS12.Inventory" + }, + "CertStores.RFPkcs12.Management": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.PKCS12.Management" + }, + "CertStores.RFPkcs12.Discovery": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.Discovery" + }, + "CertStores.RFPEM.Inventory": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.PEM.Inventory" + }, + "CertStores.RFPEM.Management": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.PEM.Management" + }, + "CertStores.RFPEM.Discovery": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.Discovery" + }, + "CertStores.RFJKS.Inventory": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.JKS.Inventory" + }, + "CertStores.RFJKS.Management": { + "assemblypath": "RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.JKS.Management" + }, + "CertStores.RFJKS.Discovery": { + "assemblypath": 
"RemoteFile.dll", + "TypeFullName": "Keyfactor.Extensions.Orchestrator.RemoteFile.Discovery" + } + } + } +} \ No newline at end of file diff --git a/integration-manifest.json b/integration-manifest.json new file mode 100644 index 00000000..59217fbc --- /dev/null +++ b/integration-manifest.json @@ -0,0 +1,31 @@ +{ + "$schema": "https://keyfactor.github.io/integration-manifest-schema.json", + "integration_type": "orchestrator", + "name": "Remote File", + "status": "production", + "link_github": true, + "description": "The Remote File Orchestrator allows for the remote management of file-based certificate stores. Discovery, Inventory, and Management functions are supported. The orchestrator performs operations by first converting the certificate store into a BouncyCastle PKCS12Store.", + "about": { + "orchestrator": { + "win": { + "supportsCreateStore": true, + "supportsDiscovery": true, + "supportsManagementAdd": true, + "supportsManagementRemove": true, + "supportsReenrollment": false, + "supportsInventory": true, + "platformSupport": "Unused" + }, + "linux": { + "supportsCreateStore": true, + "supportsDiscovery": true, + "supportsManagementAdd": true, + "supportsManagementRemove": true, + "supportsReenrollment": false, + "supportsInventory": true, + "platformSupport": "Unused" + } + } + } +} + diff --git a/readme_source.md b/readme_source.md new file mode 100644 index 00000000..0c3ff6a4 --- /dev/null +++ b/readme_source.md @@ -0,0 +1,231 @@ + +## Overview +The Remote File Orchestrator Extension is a multi-purpose integration that can remotely manage a variety of file-based certificate stores and can easily be extended to manage others. 
The certificate store types that can be managed in the current version are: +- Java Keystores of type JKS +- PKCS12 files, including, but not limited to, Java keystores of type PKCS12 +- PEM files + +The Keyfactor Universal Orchestrator (UO) can be installed on either Windows or Linux; likewise, the Remote File Orchestrator Extension can be used to manage certificate stores residing on both Windows and Linux servers. The supported configurations of Universal Orchestrator hosts and managed orchestrated servers are shown below: + +| | UO Installed on Windows | UO Installed on Linux | +|-----|-----|------| +|Orchestrated Server on remote Windows server|✓ | | +|Orchestrated Server on remote Linux server|✓ |✓ | +|Orchestrated Server on same server as orchestrator service (Agent)|✓ |✓ | + +This orchestrator extension makes use of an SSH connection to communicate remotely with certificate stores hosted on Linux servers and WinRM to communicate with certificate stores hosted on Windows servers. +  +  +## Versioning + +The version number of the Remote File Orchestrator Extension can be verified by right clicking on the RemoteFile.dll file in the Extensions/RemoteFile installation folder, selecting Properties, and then clicking on the Details tab. +  +  +## Keyfactor Version Supported + +The Remote File Orchestrator Extension has been tested against Keyfactor Universal Orchestrator version 9.5, but should work against earlier or later versions of the Keyfactor Universal Orchestrator. +  +  +## Security Considerations + +**For Linux orchestrated servers:** +1. The Remote File Orchestrator Extension makes use of a few common Linux commands when managing stores on Linux servers. If the credentials you will be connecting with need elevated access to run these commands, you must set up the user id as a sudoer with no password necessary and set the config.json "UseSudo" value to "Y" (See "Configuration File Setup" later in this README for more information on setting up the config.json file). 
The full list of these commands is below: + * echo + * find + * cp + * rm + * chown + * install + +2. The Remote File Orchestrator Extension makes use of SFTP and/or SCP to transfer files to and from the orchestrated server. SFTP/SCP cannot make use of sudo, so all folders containing certificate stores will need to allow SFTP/SCP file transfer. If this is not possible, set the values in the config.json appropriately to use an alternative upload/download folder that does allow SFTP/SCP file transfer (See "Configuration File Setup" later in this README regarding the config.json file). + +**For Windows orchestrated servers:** +1. Make sure that WinRM is set up on the orchestrated server and that the WinRM port is part of the certificate store path when creating a new certificate store in Keyfactor Command (See "Creating Certificate Stores" later in this README). + +2. When creating/configuring a certificate store in Keyfactor Command, you will see a "Change Credentials" link after entering in the destination client machine (IP or DNS). This link **must** be clicked on to present the credentials dialog. However, it is not required that you enter separate credentials. Simply click SAVE in the resulting dialog without entering in credentials to use the credentials that the Keyfactor Orchestrator Service is running under. Alternatively, you may enter separate credentials into this dialog and use those to connect to the orchestrated server. + +**SSH Key-Based Authentication** +1. When creating a Keyfactor certificate store for the remote file orchestrator extension (see "Creating Certificate Stores" later in this README), you may supply either a user id and password for the certificate store credentials (directly or through one of Keyfactor Command's PAM integrations), or a user id and SSH private key. Both PKCS#1 (BEGIN RSA PRIVATE KEY) and PKCS#8 (BEGIN PRIVATE KEY) formats are supported for the SSH private key. 
If using the normal Keyfactor Command credentials dialog without PAM integration, just copy and paste the full SSH private key into the Password textbox. +  +  +## Remote File Orchestrator Extension Installation +1. Create the certificate store types you wish to manage. Please refer to the individual sections devoted to each supported store type under "Certificate Store Types" later in this README. +2. Stop the Keyfactor Universal Orchestrator Service for the orchestrator you plan to install this extension to run on. +3. In the Keyfactor Orchestrator installation folder (by convention usually C:\Program Files\Keyfactor\Keyfactor Orchestrator), find the "Extensions" folder. Underneath that, create a new folder named "RemoteFile". You may choose to use a different name if you wish. +4. Download the latest version of the RemoteFile orchestrator extension from [GitHub](https://github.com/Keyfactor/remote-file-orchestrator). Click on the "Latest" release link on the right hand side of the main page and download the first zip file. +5. Copy the contents of the download installation zip file to the folder created in Step 3. +6. (Optional) If you decide to create one or more certificate store types with short names different than the suggested values (please see the individual certificate store type sections in "Certificate Store Types" later in this README for more information regarding certificate store types), edit the manifest.json file in the folder you created in step 3, and modify each "ShortName" in each "Certstores.{ShortName}.{Operation}" line with the ShortName you used to create the respective certificate store type. If you created it with the suggested values, this step can be skipped. +7. Modify the config.json file (See the "Configuration File Setup" section later in this README) +8. Start the Keyfactor Universal Orchestrator Service. +  +  +## Configuration File Setup + +The Remote File Orchestrator Extension uses a JSON configuration file. 
It is located in the {Keyfactor Orchestrator Installation Folder}\Extensions\RemoteFile. None of the values are required, and a description of each follows below: +{ + "UseSudo": "N", + "CreateStoreIfMissing": "N", + "UseNegotiate": "N", + "SeparateUploadFilePath": "", + "FileTransferProtocol": "SCP", + "DefaultLinuxPermissionsOnStoreCreation": "600" +} + +**UseSudo** (Applicable for Linux orchestrated servers only) - Y/N - Determines whether to prefix certain Linux commands with "sudo". This can be very helpful in ensuring that the user id running commands over an ssh connection uses "least permissions necessary" to process each task. Setting this value to "Y" will prefix all Linux commands with "sudo" with the expectation that the command being executed on the orchestrated Linux server will look in the sudoers file to determine whether the logged in ID has elevated permissions for that specific command. For Windows orchestrated servers, this setting has no effect. Setting this value to "N" will result in "sudo" not being added to Linux commands. **Default value if missing - N**. +**CreateStoreIfMissing** - Y/N - Determines, during a Management-Add job, if a certificate store should be created if it does not already exist. If set to "N", and the store referenced in the Management-Add job is not found, the job will return an error with a message stating that the store does not exist. If set to "Y", the store will be created and the certificate added to the certificate store. **Default value if missing - N**. +**UseNegotiate** (Applicable for Windows orchestrated servers only) – Y/N - Determines if WinRM should use Negotiate (Y) when connecting to the remote server. **Default Value if missing - N**. +**SeparateUploadFilePath** (Applicable for Linux managed servers only) – Set this to the path you wish to use as the location on the orchestrated server to upload/download and later remove temporary work files when processing jobs. 
If set to "" or not provided, the location of the certificate store itself will be used. File transfer itself is performed using SCP or SFTP protocols (see FileTransferProtocol setting). **Default Value if missing - blank**. +**FileTransferProtocol** (Applicable for Linux orchestrated servers only) - SCP/SFTP/Both - Determines the protocol to use when uploading/downloading files while processing a job. Valid values are: SCP - uses SCP, SFTP - uses SFTP, or Both - will attempt to use SCP first, and if that does not work, will attempt the file transfer via SFTP. **Default Value if missing - SCP**. +**DefaultLinuxPermissionsOnStoreCreation** (Applicable for Linux managed servers only) - Value must be 3 digits all between 0-7. The Linux file permissions that will be set on a new certificate store created via a Management Create job or a Management Add job where CreateStoreIfMissing is set to "Y". This value will be used for all certificate stores managed by this orchestrator instance unless overridden by the optional "Linux File Permissions on Store Creation" custom parameter setting on a specific certificate store (See the "Certificate Store Types" section later in this README). **Default Value if missing - 600**. +  +  +## Certificate Store Types + +When setting up the certificate store types you wish the Remote File Orchestrator Extension to manage, there are some common settings that will be the same for all supported types. To create a new Certificate Store Type in Keyfactor Command, first click on settings (the gear icon on the top right) => Certificate Store Types => Add. Alternatively, there are CURL scripts for all of the currently implemented certificate store types in the Certificate Store Type CURL Scripts folder in this repo if you wish to automate the creation of the desired store types. + +**Common Values:** +*Basic Tab:* +- **Name** – Required. The display name you wish to use for the new Certificate Store Type. 
+- **ShortName** - Required. See specific certificate store type instructions below. +- **Custom Capability** - Unchecked +- **Supported Job Types** - Inventory, Add, Remove, Create, and Discovery should all be checked. +- **Needs Server** - Checked +- **Blueprint Allowed** - Checked if you wish to make use of blueprinting. Please refer to the Keyfactor Command Reference Guide for more details on this feature. +- **Uses PowerShell** - Unchecked +- **Requires Store Password** - Checked. NOTE: This does not require that a certificate store have a password, but merely ensures that a user who creates a Keyfactor Command Certificate Store MUST click the Store Password button and either enter a password or check No Password. Certificate stores with no passwords are still possible for certain certificate store types when checking this option. +- **Supports Entry Password** - Unchecked. + +*Advanced Tab:* +- **Store Path Type** - Freeform +- **Supports Custom Alias** - See specific certificate store type instructions below. +- **Private Key Handling** - See specific certificate store type instructions below +- **PFX Password Style** - Default + +*Custom Fields Tab:* +- **Name:** linuxFilePermissionsOnStoreCreation, **Display Name:** Linux File Permissions on Store Creation, **Type:** String, **Default Value:** none. This custom field is **not required**. If not present, value reverts back to DefaultLinuxPermissionsOnStoreCreation setting in config.json (see Configuration File Setup section above). This value, applicable to certificate stores hosted on Linux orchestrated servers only, must be 3 digits all between 0-7. This represents the Linux file permissions that will be set for this certificate store if created via a Management Create job or a Management Add job where the config.json option CreateStoreIfMissing is set to "Y". 
+ +Entry Parameters Tab: +- See specific certificate store type instructions below + +  +  +************************************** +**RFPkcs12 Certificate Store Type** +************************************** + +The RFPkcs12 store type can be used to manage any PKCS#12 compliant file format INCLUDING java keystores of type PKCS12. + +Use cases supported: +1. One-to-many trust entries - A single certificate without a private key in a certificate store. Each certificate identified with a custom alias or certificate thumbprint. +2. One-to-many key entries - One-to-many certificates with private keys and optionally the full certificate chain. Each certificate identified with a custom alias or certificate thumbprint. +3. A mix of trust and key entries. + +**Specific Certificate Store Type Values** +*Basic Tab:* +- **Short Name** – Required. Suggested value - **RFPkcs12**. If you choose to use a different value you must make the corresponding modification to the manifest.json file (see "Remote File Orchestrator Extension Installation", step 6 above). + +*Advanced Tab:* +- **Supports Custom Alias** - Required. +- **Private Key Handling** - Optional. + +*Custom Fields Tab:* +- no additional custom fields/parameters + +Entry Parameters Tab: +- no additional entry parameters + +  +CURL script to automate certificate store type creation can be found [here](https://github.com/Keyfactor/remote-file-orchestrator/blob/initial-version/Certificate%20Store%20Type%20CURL%20Scripts/PKCS12.curl) + +  +  +************************************** +**RFJKS Certificate Store Type** +************************************** + +The RFJKS store type can be used to manage java keystores of type JKS. **PLEASE NOTE:** Java keystores of type PKCS12 **_cannot_** be managed by the RFJKS type. You **_must_** use RFPkcs12. + +Use cases supported: +1. One-to-many trust entries - A single certificate without a private key in a certificate store. 
Each certificate identified with a custom alias or certificate thumbprint. +2. One-to-many key entries - One-to-many certificates with private keys and optionally the full certificate chain. Each certificate identified with a custom alias or certificate thumbprint. +3. A mix of trust and key entries. + +**Specific Certificate Store Type Values** +*Basic Tab:* +- **Short Name** – Required. Suggested value - **RFJKS**. If you choose to use a different value you must make the corresponding modification to the manifest.json file (see "Remote File Orchestrator Extension Installation", step 6 above). + +*Advanced Tab:* +- **Supports Custom Alias** - Required. +- **Private Key Handling** - Optional. + +*Custom Fields Tab:* +- no additional custom fields/parameters + +Entry Parameters Tab: +- no additional entry parameters + +  +CURL script to automate certificate store type creation can be found [here](https://github.com/Keyfactor/remote-file-orchestrator/blob/initial-version/Certificate%20Store%20Type%20CURL%20Scripts/JKS.curl) + +  +  +************************************** +**RFPEM Certificate Store Type** +************************************** + +The RFPEM store type can be used to manage PEM encoded files. + +Use cases supported: +1. Trust stores - A file with one-to-many certificates (no private keys, no certificate chains). +2. Single certificate stores with private key in the file. +3. Single certificate stores with certificate chain and private key in the file. +4. Single certificate stores with private key in an external file. +5. Single certificate stores with certificate chain in the file and private key in an external file. + +**Specific Certificate Store Type Values** +*Basic Tab:* +- **Short Name** – Required. Suggested value - **RFPEM**. If you choose to use a different value you must make the corresponding modification to the manifest.json file (see "Remote File Orchestrator Extension Installation", step 6 above). 
+ +*Advanced Tab:* +- **Supports Custom Alias** - Forbidden. +- **Private Key Handling** - Optional. + +*Custom Fields Tab:* +- **Name:** IsTrustStore, **Display Name:** Trust Store, **Type:** Bool, **Default Value:** false. This custom field is **not required**. Default value if not present is 'false'. If 'true', this store will be identified as a trust store. Any certificates attempting to be added via a Management-Add job that contain a private key will raise an error with an accompanying message. Multiple certificates may be added to the store in this use case. If set to 'false', this store can only contain a single certificate with chain and private key. Management-Add jobs attempting to add a certificate without a private key to a store marked as IsTrustStore = 'false' will raise an error with an accompanying message. +- **Name:** IncludesChain, **Display Name:** Store Includes Chain, **Type:** Bool, **Default Value:** false. This custom field is **not required**. Default value if not present is 'false'. If 'true' the full certificate chain, if sent by Keyfactor Command, will be stored in the file. The order of appearance is always assumed to be 1) end entity certificate, 2) issuing CA certificate, and 3) root certificate. If additional CA tiers are applicable, the order will be end entity certificate up to the root CA certificate. If set to 'false', only the end entity certificate and private key will be stored in this store. This setting is only valid when IsTrustStore = false. +- **Name:** SeparatePrivateKeyFilePath, **Display Name:** Separate Private Key File Location, **Type:** String, **Default Value:** empty. This custom field is **not required**. If empty, or not provided, it will be assumed that the private key for the certificate stored in this file will be inside the same file as the certificate. If the full path AND file name is put here, that location will be used to store the private key as an external file. 
This setting is only valid when IsTrustStore = false. + +Entry Parameters Tab: +- no additional entry parameters + +  +CURL script to automate certificate store type creation can be found [here](https://github.com/Keyfactor/remote-file-orchestrator/blob/initial-version/Certificate%20Store%20Type%20CURL%20Scripts/PEM.curl) +  +  +## Creating Certificate Stores + +Please refer to the Keyfactor Command Reference Guide for information on creating certificate stores in Keyfactor Command. However, there are two fields that are important to highlight here - Client Machine and Store Path. For Linux orchestrated servers, "Client Machine" should be the DNS or IP address of the remote orchestrated server while "Store Path" is the full path and file name of the file based store, beginning with a forward slash (/). For Windows orchestrated servers, "Client Machine" should be of the format {protocol}://{dns-or-ip}:{port} where {protocol} is either http or https, {dns-or-ip} is the DNS or IP address of the remote orchestrated server, and {port} is the port where WinRM is listening, by convention usually 5985 for http and 5986 for https. "Store Path" is the full path and file name of the file based store, beginning with a drive letter (i.e. c:\). +  +  +## Developer Notes + +The Remote File Orchestrator Extension is meant to be extended to be used for other file based certificate store types than the ones referenced above. The advantage to extending this integration rather than creating a new one is that the configuration, remoting, and Inventory/Management/Discovery logic is already written. The developer needs to only implement a few classes and write code to convert the desired file based store to a common format. This section describes the steps necessary to add additional store/file types. Please note that familiarity with the [.Net Core BouncyCastle cryptography library](https://github.com/bcgit/bc-csharp) is a prerequisite for adding additional supported file/store types. 
+ +Steps to create a new supported file based certificate store type: + +1. Clone this repository from GitHub +2. Open the .net core solution in the IDE of your choice +3. Under the ImplementationStoreTypes folder, create a new folder named for the new certificate store type +4. Create a new class (with namespace of Keyfactor.Extensions.Orchestrator.RemoteFile.{NewType}) in the new folder that will implement ICertificateStoreSerializer. By convention, {StoreTypeName}CertificateSerializer would be a good choice for the class name. This interface requires you to implement two methods: DeserializeRemoteCertificateStore and SerializeRemoteCertificateStore. The first method will be called passing in a byte array containing the contents of the file based store you are managing. The developer will need to convert that to an Org.BouncyCastle.Pkcs.Pkcs12Store class and return it. The second method takes in an Org.BouncyCastle.Pkcs.Pkcs12Store and converts it to a collection of custom file representations (a generic `List`). This is where the majority of the development will be done. +5. Create an Inventory.cs class (with namespace of Keyfactor.Extensions.Orchestrator.RemoteFile.{NewType}) under the new folder and have it inherit InventoryBase. Override the internal GetCertificateStoreSerializer() method with a one line implementation returning a new instantiation of the class created in step 4. +6. Create a Management.cs class (with namespace of Keyfactor.Extensions.Orchestrator.RemoteFile.{NewType}) under the new folder and have it inherit ManagementBase. Override the internal GetCertificateStoreSerializer() method with a one line implementation returning a new instantiation of the class created in step 4. +7. Modify the manifest.json file to add three new sections (for Inventory, Management, and Discovery). Make sure for each, the "NewType" in Certstores.{NewType}.{Operation}, matches what you will use for the certificate store type short name in Keyfactor Command. 
On the "TypeFullName" line for all three sections, make sure the namespace matches what you used for your new classes. Note that the namespace for Discovery uses a common class, as Discovery is a common implementation shared by all supported store types. +8. After compiling, move all compiled files, including the config.json and manifest.json to {Keyfactor Orchestrator Installation Folder}\Extensions\RemoteFile. +9. Create the certificate store type in Keyfactor Command +10. Add a new CURL script to build the proper Keyfactor Command certificate store type and place it under "Certificate Store Type CURL Scripts". The name of the file should match the ShortName you are using for the new store type. +11. Update the documentation in readme_source.md by adding a new section under "Certificate Store Types" for this new supported file based store type. Include a pointer to the CURL script created in step 10. +  +  +## License +[Apache](https://apache.org/licenses/LICENSE-2.0) +