[SPARK-542] Add and document Driver<->Executor TLS support. (apache#188)
* Add env var decoding and documentation about driver-to-executor TLS support.

* Switched to file-based secrets.

* Reverted spark-env.sh change. Added TLS integration test (currently disabled).

* Updated docs, incorporating suggestions from Suzanne.

* Removed mention of driver labels in docs.

* Consolidated secrets creation code, removed SecretHandler.

* Simplified the options in the run command.

* Simplified the interface by automatically inferring some of the TLS config properties.

* Better --help and error messages, updated docs.

* Make sure user-defined secrets are preserved. Updated test and docs.

* Converted passwords to "magic" options. Updated test and docs.

* Added blurb in limitations docs section, moved setting of password configs.
Arthur Rand authored and susanxhuynh committed Nov 22, 2017
1 parent 28ea345 commit 022f332
Showing 8 changed files with 269 additions and 53 deletions.
95 changes: 94 additions & 1 deletion cli/dcos-spark/submit_builder.go
@@ -43,6 +43,11 @@ type sparkArgs struct {
keytabSecretPath string
tgtSecretPath string
tgtSecretValue string
keystoreSecretPath string
keystorePassword string
privateKeyPassword string
truststoreSecretPath string
truststorePassword string
propertiesFile string
properties map[string]string

@@ -59,6 +64,11 @@ type sparkArgs struct {

func NewSparkArgs() *sparkArgs {
return &sparkArgs{
"",
"",
"",
"",
"",
"",
"",
"",
@@ -140,12 +150,24 @@ Args:
PlaceHolder("PROP=VALUE").StringMapVar(&args.properties)
submit.Flag("kerberos-principal", "Principal to be used to login to KDC.").
PlaceHolder("user@REALM").Default("").StringVar(&args.kerberosPrincipal)
submit.Flag("keytab-secret-path", "path to Keytab in secret store to be used in the Spark drivers").
submit.Flag("keytab-secret-path", "Path to Keytab in secret store to be used in the Spark drivers").
PlaceHolder("/mykeytab").Default("").StringVar(&args.keytabSecretPath)
submit.Flag("tgt-secret-path", "Path to ticket granting ticket (TGT) in secret store to be used " +
"in the Spark drivers").PlaceHolder("/mytgt").Default("").StringVar(&args.tgtSecretPath)
submit.Flag("tgt-secret-value", "Value of TGT to be used in the drivers, must be base64 encoded").
Default("").StringVar(&args.tgtSecretValue)
submit.Flag("keystore-secret-path", "Path to keystore in secret store for TLS/SSL. " +
"Make sure to set --keystore-password and --private-key-password as well.").
PlaceHolder("__dcos_base64__keystore").Default("").StringVar(&args.keystoreSecretPath)
submit.Flag("keystore-password", "A password to the keystore.").
Default("").StringVar(&args.keystorePassword)
submit.Flag("private-key-password", "A password to the private key in the keystore.").
Default("").StringVar(&args.privateKeyPassword)
submit.Flag("truststore-secret-path", "Path to truststore in secret store for TLS/SSL. " +
"Make sure to set --truststore-password as well.").
PlaceHolder("__dcos_base64__truststore").Default("").StringVar(&args.truststoreSecretPath)
submit.Flag("truststore-password", "A password to the truststore.").
Default("").StringVar(&args.truststorePassword)

submit.Flag("isR", "Force using SparkR").Default("false").BoolVar(&args.isR)
submit.Flag("isPython", "Force using Python").Default("false").BoolVar(&args.isPython)
@@ -257,6 +279,52 @@ func setupKerberosAuthArgs(args *sparkArgs) error {
return errors.New(fmt.Sprintf("Unable to add Kerberos args, got args %s", args))
}

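// setupTLSArgs maps the TLS-related CLI flags onto the corresponding
// spark.ssl.* and spark.mesos.[driver|executor].secret.* properties.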
func setupTLSArgs(args *sparkArgs) {
    args.properties["spark.mesos.containerizer"] = "mesos"
    args.properties["spark.ssl.enabled"] = "true"

    // Keystore and truststore
    const keyStoreFileName = "server.jks"
    const trustStoreFileName = "trust.jks"
    args.properties["spark.ssl.keyStore"] = keyStoreFileName
    if args.truststoreSecretPath != "" {
        args.properties["spark.ssl.trustStore"] = trustStoreFileName
    }

    // Secret paths, filenames, and place holder envvars
    paths := []string{args.keystoreSecretPath}
    filenames := []string{keyStoreFileName}
    envkeys := []string{"DCOS_SPARK_KEYSTORE"}
    if args.truststoreSecretPath != "" {
        paths = append(paths, args.truststoreSecretPath)
        filenames = append(filenames, trustStoreFileName)
        envkeys = append(envkeys, "DCOS_SPARK_TRUSTSTORE")
    }
    joinedPaths := strings.Join(paths, ",")
    joinedFilenames := strings.Join(filenames, ",")
    joinedEnvkeys := strings.Join(envkeys, ",")

    taskTypes := []string{"driver", "executor"}
    for _, taskType := range taskTypes {
        appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.names", taskType), joinedPaths, args)
        appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.filenames", taskType), joinedFilenames, args)
        appendToPropertyIfSet(fmt.Sprintf("spark.mesos.%s.secret.envkeys", taskType), joinedEnvkeys, args)
    }

    // Passwords
    args.properties["spark.ssl.keyStorePassword"] = args.keystorePassword
    args.properties["spark.ssl.keyPassword"] = args.privateKeyPassword

    if args.truststoreSecretPath != "" {
        args.properties["spark.ssl.trustStorePassword"] = args.truststorePassword
    }

    // Protocol
    if _, ok := args.properties["spark.ssl.protocol"]; !ok {
        args.properties["spark.ssl.protocol"] = "TLS"
    }
}

func parseApplicationFile(args *sparkArgs) error {
appString := args.app.String()
fs := strings.Split(appString, "/")
@@ -435,6 +503,13 @@ func appendToProperty(propValue, toAppend string, args *sparkArgs) {
}
}

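// appendToPropertyIfSet appends toAppend to the named property only if the
// property already has a value; otherwise the property is left unset.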
func appendToPropertyIfSet(propValue, toAppend string, args *sparkArgs) {
    _, contains := args.properties[propValue]
    if contains {
        args.properties[propValue] += "," + toAppend
    }
}

func getBase64Content(path string) string {
log.Printf("Opening file %s", path)
data, err := ioutil.ReadFile(path)
@@ -573,6 +648,8 @@ func buildSubmitJson(cmd *SparkCommand) (string, error) {
log.Printf("Setting DCOS_SPACE to %s", cmd.submitDcosSpace)
appendToProperty("spark.mesos.driver.labels", fmt.Sprintf("DCOS_SPACE:%s", cmd.submitDcosSpace),
args)
appendToProperty("spark.mesos.task.labels", fmt.Sprintf("DCOS_SPACE:%s", cmd.submitDcosSpace),
args)

// HDFS config
hdfs_config_url, err := getStringFromTree(responseJson, []string{"app", "labels", "SPARK_HDFS_CONFIG_URL"})
@@ -620,6 +697,22 @@ func buildSubmitJson(cmd *SparkCommand) (string, error) {
}
}

// TLS configuration
if args.keystoreSecretPath != "" {
    // Make sure passwords are set
    if args.keystorePassword == "" || args.privateKeyPassword == "" {
        return "", errors.New("Need to provide keystore password and key password with keystore")
    }

    if args.truststoreSecretPath != "" {
        if args.truststorePassword == "" {
            return "", errors.New("Need to provide truststore password with truststore")
        }
    }

    setupTLSArgs(args)
}

jsonMap := map[string]interface{}{
"action": "CreateSubmissionRequest",
"appArgs": args.appArgs,
5 changes: 5 additions & 0 deletions docs/limitations.md
@@ -10,3 +10,8 @@ enterprise: 'no'
* Spark jobs run in Docker containers. The first time you run a Spark job on a node, it might take longer than you expect because of the `docker pull`.

* DC/OS Apache Spark only supports running the Spark shell from within a DC/OS cluster. See the Spark Shell section for more information. For interactive analytics, we recommend Zeppelin, which supports visualizations and dynamic dependency management.

* With Spark SSL/TLS enabled,
if you specify environment-based secrets with `spark.mesos.[driver|executor].secret.envkeys`,
the keystore and truststore secrets will also show up as environment-based secrets,
due to the way secrets are implemented. You can ignore these extra environment variables.
82 changes: 56 additions & 26 deletions docs/security.md
@@ -15,48 +15,78 @@ Follow these instructions to [authenticate in strict mode](https://docs.mesosphe
SSL support in DC/OS Apache Spark encrypts the following channels:

* From the [DC/OS admin router][11] to the dispatcher.
* From the dispatcher to the drivers.
* From the drivers to their executors.

There are a number of configuration variables relevant to SSL setup. List them with the following command:
To enable SSL, a Java keystore (and, optionally, a truststore) must be provided, along
with their passwords. The first three settings below are **required** during job
submission. If you use a truststore, the last two are also **required**:

dcos package describe spark --config
| Setting                          | Description                                      |
|----------------------------------|-------------------------------------------------|
| `--keystore-secret-path` | Path to keystore in secret store |
| `--keystore-password` | The password used to access the keystore |
| `--private-key-password` | The password for the private key |
| `--truststore-secret-path` | Path to truststore in secret store |
| `--truststore-password` | The password used to access the truststore |

Here are the required variables:

| Variable | Description |
|----------------------------|-------------------------------------------------|
| `spark.ssl.enabled` | Whether to enable SSL (default: `false`). |
| `spark.ssl.keyStoreBase64` | Base64 encoded blob containing a Java keystore. |
In addition, a number of Spark configuration properties are relevant to SSL setup.
These settings are **optional**:

The Java keystore (and, optionally, truststore) are created using the [Java keytool][12]. The keystore must contain one private key and its signed public key. The truststore is optional and might contain a self-signed root-ca certificate that is explicitly trusted by Java.
| Variable | Description | Default Value |
|----------------------------------|-----------------------|---------------|
| `spark.ssl.enabledAlgorithms`    | Allowed ciphers       | JVM defaults  |
| `spark.ssl.protocol` | Protocol | TLS |

Both stores must be base64 encoded, for example:

cat keystore | base64 /u3+7QAAAAIAAAACAAAAAgA...
The keystore and truststore are created using the [Java keytool][12]. The keystore
must contain one private key and its signed public key. The truststore is optional
and might contain a self-signed root-ca certificate that is explicitly trusted by Java.
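
For reference, a keystore holding one key pair and a matching truststore can be generated with `keytool` roughly as follows (the alias, key parameters, and validity period here are only placeholders; adjust them to your needs):

```bash
# Generate a keystore containing one private key and its self-signed certificate.
keytool -genkeypair -alias spark -keyalg RSA -keysize 2048 \
  -validity 365 -keystore server.jks

# Export the certificate and import it into a separate truststore.
keytool -exportcert -alias spark -keystore server.jks -file spark.crt
keytool -importcert -alias spark -file spark.crt -keystore trust.jks
```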

Both stores must be base64 encoded without newlines, for example:

```bash
cat keystore | base64 -w 0 > keystore.base64
cat keystore.base64
/u3+7QAAAAIAAAACAAAAAgA...
```

**Note:** The base64 string of the keystore will probably be much longer than the snippet above, spanning 50 lines or so.

With this and the password `secret` for the keystore and the private key, your JSON options file will look like this:
Add the stores to the DC/OS secret store. For example, if your base64-encoded keystore
and truststore are `server.jks.base64` and `trust.jks.base64`, respectively, then use the following
commands to add them to the secret store:

```bash
dcos security secrets create /__dcos_base64__keystore --value-file server.jks.base64
dcos security secrets create /__dcos_base64__truststore --value-file trust.jks.base64
```

You must add the following configuration options to your `dcos spark run` command.
The ones in parentheses are optional:

{
"security": {
"ssl": {
"enabled": true,
"keyStoreBase64": "/u3+7QAAAAIAAAACAAAAAgA...”,
"keyStorePassword": "secret",
"keyPassword": "secret"
}
}
}
```bash

Install Spark with your custom configuration:
dcos spark run --verbose --submit-args="\
--keystore-secret-path=<path/to/keystore, e.g. __dcos_base64__keystore> \
--keystore-password=<password to keystore> \
--private-key-password=<password to private key in keystore> \
(--truststore-secret-path=<path/to/truststore, e.g. __dcos_base64__truststore> \)
(--truststore-password=<password to truststore> \)
(--conf spark.ssl.enabledAlgorithms=<cipher, e.g., TLS_RSA_WITH_AES_128_CBC_SHA256> \)
--class <Spark Main class> <Spark Application JAR> [application args]"
```

dcos package install --options=options.json spark
**Note:** If you have specified a space for your secrets other than the default value,
`/spark`, then you must set `spark.mesos.task.labels=DCOS_SPACE:<dcos_space>`
in the command above in order to access the secrets.
See the [secrets documentation][13] for more details about spaces.
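
For example, if the keystore and truststore secrets live under a hypothetical space `/myspace`, the label can be passed as an extra `--conf` (the secret path, passwords, main class, and JAR URL below are placeholders):

```bash
dcos spark run --submit-args="\
  --keystore-secret-path=/myspace/__dcos_base64__keystore \
  --keystore-password=changeit \
  --private-key-password=changeit \
  --conf spark.mesos.task.labels=DCOS_SPACE:/myspace \
  --class MySparkMain http://example.com/my-spark-job.jar"
```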

Make sure to connect the DC/OS cluster only using an SSL connection (i.e. by using an `https://<dcos-url>`). Use the following command to set your DC/OS URL:
**Note:** If you specify environment-based secrets with `spark.mesos.[driver|executor].secret.envkeys`,
the keystore and truststore secrets will also show up as environment-based secrets,
due to the way secrets are implemented. You can ignore these extra environment variables.

dcos config set core.dcos_url https://<dcos-url>

[11]: https://docs.mesosphere.com/1.9/overview/architecture/components/
[12]: http://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html
[13]: https://docs.mesosphere.com/1.10/security/#spaces
48 changes: 48 additions & 0 deletions tests/jobs/python/pi_with_secret.py
@@ -0,0 +1,48 @@
import os
import sys
from random import random
from operator import add

from pyspark.sql import SparkSession

def check_secret(secret_name, secret_content):
    '''
    Make sure the extra secret envvar and secret file show up in driver.
    '''
    envvar_content = os.environ.get(secret_name)
    if envvar_content != secret_content:
        print("Unexpected contents in secret envvar, found: {} expected: {}".format(envvar_content, secret_content))
        exit(1)

    file_content = open(secret_name, 'r').read()
    if file_content != secret_content:
        print("Unexpected contents in secret file, found: {} expected: {}".format(file_content, secret_content))
        exit(1)


if __name__ == "__main__":
    """
    Usage: pi [partitions] [secret] [secret content]
    Checks for the given env-based and file-based driver secret.
    Then calculates the value of pi.
    """

    check_secret(sys.argv[2], sys.argv[3])

    spark = SparkSession \
        .builder \
        .appName("PythonPi") \
        .getOrCreate()

    partitions = int(sys.argv[1])
    n = 100000 * partitions

    def f(_):
        x = random() * 2 - 1
        y = random() * 2 - 1
        return 1 if x ** 2 + y ** 2 < 1 else 0

    count = spark.sparkContext.parallelize(range(1, n + 1), partitions).map(f).reduce(add)
    print("Pi is roughly %f" % (4.0 * count / n))

    spark.stop()
1 change: 1 addition & 0 deletions tests/resources/server.jks.base64
@@ -0,0 +1 @@
/u3+7QAAAAIAAAACAAAAAgAGY2Fyb290AAABXxZdYVoABVguNTA5AAACtjCCArIwggIboAMCAQICCQCzNKodBAdsRzANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCAXDTE3MTAxMzE1Mjg0MloYDzIxMTcwOTE5MTUyODQyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPdPSuUvDBpt6+1nV6YlTaaNZjd6OsmWm1tRllnpeJfnlXai3HkmQmRuTtbs2Foru5JVtHynPG1vWS/VeOMSBZYeDBHA2s2yLhWcWLFutPKZbNJ3Cf8OjGFf8wqfVBF8xGjUiaQUBTsrHvYlwRkX+bUo7074f5FgLOz15mdomZCQIDAQABo4GnMIGkMB0GA1UdDgQWBBQRjmxtDBg7vrMDMAH4y78lVhPtRjB1BgNVHSMEbjBsgBQRjmxtDBg7vrMDMAH4y78lVhPtRqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJALM0qh0EB2xHMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEANqti2ozB3tlwTVyiPidrVK7n5RMLZQNsDJ68WMl1p7DhtSi4dU87kkNSAFIwHgndVr10CWhfa6TCFW4eVM/bGCEDK5ay+zhZRp+B+YYAKKQZatiMC/EXTdJ2pc+vEcEDF3vAbZ1hzUBbNEWZKkYNDnHOB9dfeQVv6w21P9wdc4gAAAABAAlsb2NhbGhvc3QAAAFfFl2Z7wAAAY8wggGLMA4GCisGAQQBKgIRAQEFAASCAXf8ccU6jYrFx5kW4S+fBJXuQOcCedY9TzOS2bPyouSwPCs42Zo6p0+CSbvv+JqYfMDTR5YbsjMR4xDwQ6B2iW/ldXX6ud0m1TtWroiQV0WSkmqI2s+yai40zjH8Z6/HdcEPSInoC+aXvTvQeK2bstZvPHUsa8H9ZwvKV2tsLjeg7kgoMFQAiMmSvnHawGO/icPwkJTctKEQnL9Jd6yGR2kc0FAAfJusuxYuIIsSXcw4Zisj7d5uRwPv4ecjOwCut1M8U/qs+Dz+LK8sFqdzVY2DyU0BhK8v7WLd2tAyNdcTnUVAQFrhdF4YX8EeMPA7Suyg5IsMo8/GD6zWtKZMDHDba1GelTnfCj7SQyT0dFRGYL/s0jC0DRQiX/2NYPaiNd43eo0yinxOe63L47XIQdjRjfFuBasrONYO4/ccjM84YB/YvbC6Lt0ppkkgGahFgqxhzG4ZSwAEdo9mRHWK1ORyZG8sH8qWdk7o+37yB8j7zIJcsU5EugIAAAACAAVYLjUwOQAAA0gwggNEMIICrQIJALjUH5yBVyi+MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwIBcNMTcxMDEzMTUzMTMwWhgPMjExNzA5MTkxNTMxMzBaMGwxEDAOBgNVBAYTB1Vua25vd24xEDAOBgNVBAgTB1Vua25vd24xEDAOBgNVBAcTB1Vua25vd24xEDAOBgNVBAoTB1Vua25vd24xEDAOBgNVBAsTB1Vua25vd24xEDAOBgNVBAMTB1Vua25vd24wggG4MIIBLAYHKoZIzjgEATCCAR8CgYEA/X9TgR11EilS30qcLuzk5/YRt1I870QAwx4/gLZRJmlFXUAiUftZPY1Y+r/F9bow9subVWzXgTuAHTRv8mZgt2uZUKWkn5/oBHsQIsJPu6nX/rfGG/g7V+fGqKYVDwT7g/bTxR7DAjVUE1oWkTL2dfOuK2HXKu/yIgMZndFIAccCFQCXYFCPFSMLzLKSuYKi64QL8Fgc9QKBgQD34aCF1ps93su8q1w2uFe5eZSvu/o66oL5V0wLPQeCZ1FZV4661FlP5nEHEIGAtEkWcSPoTCgWE7fPCTKMyKbhPBZ6i1R8jSjgo64eK7OmdZFuo38L+iE1YvH7YnoBJDvMpPG+qFGQiaiD3+Fa5Z8GkotmXoB7VSVkAUw7/s9JKgOBhQACgYEAu0U7jSWyAsVcIzCFBt9harR2Ajl9RDB/4nmSYOGGL52uI6kNH9p7PPeKWtgwiCSoIpWypo+GovjVubFKabKM9EbbRRjK8XJyU2TEtXVy3iiEAJM3mVBaSEA8xfmoQz4+ZjfI+fPOvZs3e7xUHwm3Kpu6WgidORSlnKL44MGQfZEwDQYJKoZIhvcNAQEFBQADgYEAbp/wgDQOmdBWKjxtu2N2k2adlY3X8V/sJWG3s/ewBRLdeEiX/qTiBGZQ4S/o3PrkmvqAkIMk4ZYIF+et/O8P0T8YBijA82H8zh6bH9HfT6n/88M0GCTtLqg1hmTeUthRyKKwKkDMzcjVT7NXi3zdf1Upnqzk9IgUNL9VrxO1s1wABVguNTA5AAACtjCCArIwggIboAMCAQICCQCzNKodBAdsRzANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCAXDTE3MTAxMzE1Mjg0MloYDzIxMTcwOTE5MTUyODQyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPdPSuUvDBpt6+1nV6YlTaaNZjd6OsmWm1tRllnpeJfnlXai3HkmQmRuTtbs2Foru5JVtHynPG1vWS/VeOMSBZYeDBHA2s2yLhWcWLFutPKZbNJ3Cf8OjGFf8wqfVBF8xGjUiaQUBTsrHvYlwRkX+bUo7074f5FgLOz15mdomZCQIDAQABo4GnMIGkMB0GA1UdDgQWBBQRjmxtDBg7vrMDMAH4y78lVhPtRjB1BgNVHSMEbjBsgBQRjmxtDBg7vrMDMAH4y78lVhPtRqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJALM0qh0EB2xHMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEANqti2ozB3tlwTVyiPidrVK7n5RMLZQNsDJ68WMl1p7DhtSi4dU87kkNSAFIwHgndVr10CWhfa6TCFW4eVM/bGCEDK5ay+zh
ZRp+B+YYAKKQZatiMC/EXTdJ2pc+vEcEDF3vAbZ1hzUBbNEWZKkYNDnHOB9dfeQVv6w21P9wdc4imd0svG9i4vIb3CuGKLwgRGWtmYQ==
1 change: 1 addition & 0 deletions tests/resources/trust.jks.base64
@@ -0,0 +1 @@
/u3+7QAAAAIAAAABAAAAAgAGY2Fyb290AAABXxZa5REABVguNTA5AAACtjCCArIwggIboAMCAQICCQCzNKodBAdsRzANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCAXDTE3MTAxMzE1Mjg0MloYDzIxMTcwOTE5MTUyODQyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPdPSuUvDBpt6+1nV6YlTaaNZjd6OsmWm1tRllnpeJfnlXai3HkmQmRuTtbs2Foru5JVtHynPG1vWS/VeOMSBZYeDBHA2s2yLhWcWLFutPKZbNJ3Cf8OjGFf8wqfVBF8xGjUiaQUBTsrHvYlwRkX+bUo7074f5FgLOz15mdomZCQIDAQABo4GnMIGkMB0GA1UdDgQWBBQRjmxtDBg7vrMDMAH4y78lVhPtRjB1BgNVHSMEbjBsgBQRjmxtDBg7vrMDMAH4y78lVhPtRqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJALM0qh0EB2xHMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEANqti2ozB3tlwTVyiPidrVK7n5RMLZQNsDJ68WMl1p7DhtSi4dU87kkNSAFIwHgndVr10CWhfa6TCFW4eVM/bGCEDK5ay+zhZRp+B+YYAKKQZatiMC/EXTdJ2pc+vEcEDF3vAbZ1hzUBbNEWZKkYNDnHOB9dfeQVv6w21P9wdc4iAVWlBXqa86mJGXpe7H0vZZ/++aA==
