Remove driver-side enclave dependency
- Encryption (Utils.encryptInternalRowsFlatbuffers) now uses Java crypto on the
  driver and an enclave on the workers, controlled by a flag.

- Decryption is only called from the driver, so it always uses Java crypto.

- For sorting, the `FindRangeBounds` preprocessing task is shipped to a single
  worker, where it uses the existing enclave implementation. (Thanks to András
  Méhes.)

Fixes mc2-project#64.
ankurdave committed Feb 11, 2019
1 parent 5ddda15 commit 4f90dbf
Showing 3 changed files with 34 additions and 9 deletions.
30 changes: 26 additions & 4 deletions src/main/scala/edu/berkeley/cs/rise/opaque/Utils.scala
@@ -595,7 +595,18 @@ object Utils extends Logging {
 
   val MaxBlockSize = 1000
 
-  def encryptInternalRowsFlatbuffers(rows: Seq[InternalRow], types: Seq[DataType]): Block = {
+  /**
+   * Encrypts the given Spark SQL [[InternalRow]]s into a [[Block]] (a serialized
+   * tuix.EncryptedBlocks).
+   *
+   * If `useEnclave` is true, it will attempt to use the local enclave. Otherwise, it will attempt
+   * to use the local encryption key, which is intended to be available only on the driver, not the
+   * workers.
+   */
+  def encryptInternalRowsFlatbuffers(
+      rows: Seq[InternalRow],
+      types: Seq[DataType],
+      useEnclave: Boolean): Block = {
     // For the encrypted blocks
     val builder2 = new FlatBufferBuilder
     val encryptedBlockOffsets = ArrayBuilder.make[Int]
@@ -615,8 +626,13 @@
       val plaintext = builder.sizedByteArray()
 
       // 2. Encrypt the row data and put it into a tuix.EncryptedBlock
-      val (enclave, eid) = initEnclave()
-      val ciphertext = enclave.Encrypt(eid, plaintext)
+      val ciphertext =
+        if (useEnclave) {
+          val (enclave, eid) = initEnclave()
+          enclave.Encrypt(eid, plaintext)
+        } else {
+          encrypt(plaintext)
+        }
 
       encryptedBlockOffsets += tuix.EncryptedBlock.createEncryptedBlock(
         builder2,
@@ -659,6 +675,13 @@
     Block(encryptedBlockBytes)
   }
 
+  /**
+   * Decrypts the given [[Block]] (a serialized tuix.EncryptedBlocks) and returns the rows within as
+   * Spark SQL [[InternalRow]]s.
+   *
+   * This function can only be called from the driver. The decryption key will not be available on
+   * the workers.
+   */
   def decryptBlockFlatbuffers(block: Block): Seq[InternalRow] = {
     // 4. Extract the serialized tuix.EncryptedBlocks from the Scala Block object
     val buf = ByteBuffer.wrap(block.bytes)
@@ -672,7 +695,6 @@
       ciphertextBuf.get(ciphertext)
 
       // 2. Decrypt the row data
-      val (enclave, eid) = initEnclave()
       val plaintext = decrypt(ciphertext)
 
       // 1. Deserialize the tuix.Rows and return them as Scala InternalRow objects
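
The non-enclave branch above calls `encrypt(plaintext)` and the driver-only decryption path calls `decrypt(ciphertext)`; those helpers live outside the hunks shown here. For orientation only, a minimal sketch of what a driver-side AES-GCM helper pair could look like with plain JCE follows. The object name, key handling, IV layout, and the choice of AES-128 are assumptions for illustration, not Opaque's actual implementation.

```scala
import java.security.SecureRandom
import javax.crypto.{Cipher, SecretKey}
import javax.crypto.spec.{GCMParameterSpec, SecretKeySpec}

// Illustrative sketch only; not the project's Utils.encrypt/decrypt.
object DriverCrypto {
  private val GcmIvLength = 12    // bytes
  private val GcmTagLength = 128  // bits
  private val random = new SecureRandom

  // Assumed wire format: random IV prepended to the GCM ciphertext+tag.
  def encrypt(key: SecretKey, plaintext: Array[Byte]): Array[Byte] = {
    val iv = new Array[Byte](GcmIvLength)
    random.nextBytes(iv)
    val cipher = Cipher.getInstance("AES/GCM/NoPadding")
    cipher.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(GcmTagLength, iv))
    iv ++ cipher.doFinal(plaintext)
  }

  def decrypt(key: SecretKey, ciphertext: Array[Byte]): Array[Byte] = {
    val (iv, body) = ciphertext.splitAt(GcmIvLength)
    val cipher = Cipher.getInstance("AES/GCM/NoPadding")
    cipher.init(Cipher.DECRYPT_MODE, key, new GCMParameterSpec(GcmTagLength, iv))
    cipher.doFinal(body)
  }

  def randomKey(): SecretKey = {
    val bytes = new Array[Byte](16)  // AES-128 for illustration
    random.nextBytes(bytes)
    new SecretKeySpec(bytes, "AES")
  }
}
```
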
@@ -60,10 +60,12 @@ object EncryptedSortExec {
         Block(sampledBlock)
       }.collect)
     }
-    // Find range boundaries locally
-    val (enclave, eid) = Utils.initEnclave()
+    // Find range boundaries parceled out to a single worker
     val boundaries = time("non-oblivious sort - FindRangeBounds") {
-      enclave.FindRangeBounds(eid, orderSer, numPartitions, sampled.bytes)
+      childRDD.context.parallelize(Array(sampled.bytes), 1).map { sampledBytes =>
+        val (enclave, eid) = Utils.initEnclave()
+        enclave.FindRangeBounds(eid, orderSer, numPartitions, sampledBytes)
+      }.collect.head
     }
     // Broadcast the range boundaries and use them to partition the input
     childRDD.flatMap { block =>
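
The hunk above replaces a driver-local enclave call with a one-partition Spark job, so `FindRangeBounds` runs inside an enclave on exactly one worker and only its result is collected back to the driver. A generic, self-contained sketch of that pattern follows; the toy byte-sorting step merely stands in for the worker-side enclave call, and the names are illustrative.

```scala
import org.apache.spark.sql.SparkSession

object SingleWorkerTaskExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("single-worker-task").getOrCreate()
    val sc = spark.sparkContext

    // Driver-side input that a single worker should process.
    val input: Array[Byte] = Array[Byte](3, 1, 2)

    // A one-element RDD with one partition yields exactly one task on one
    // executor; collect().head brings its lone result back to the driver.
    val result: Array[Byte] =
      sc.parallelize(Seq(input), numSlices = 1)
        .map(bytes => bytes.sorted)  // stand-in for the worker-side enclave call
        .collect()
        .head

    println(result.mkString(", "))
    spark.stop()
  }
}
```
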
@@ -76,7 +76,7 @@ case class EncryptedLocalTableScanExec(
     // Encrypt each local partition
     val encryptedPartitions: Seq[Block] =
       slicedPlaintextData.map(slice =>
-        Utils.encryptInternalRowsFlatbuffers(slice, output.map(_.dataType)))
+        Utils.encryptInternalRowsFlatbuffers(slice, output.map(_.dataType), useEnclave = false))
 
     // Make an RDD from the encrypted partitions
     sqlContext.sparkContext.parallelize(encryptedPartitions)
@@ -90,7 +90,8 @@ case class EncryptExec(child: SparkPlan)
 
   override def executeBlocked(): RDD[Block] = {
     child.execute().mapPartitions { rowIter =>
-      Iterator(Utils.encryptInternalRowsFlatbuffers(rowIter.toSeq, output.map(_.dataType)))
+      Iterator(Utils.encryptInternalRowsFlatbuffers(
+        rowIter.toSeq, output.map(_.dataType), useEnclave = true))
     }
   }
 }
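
With the new flag, the two call sites above make the driver/worker split explicit: the local table scan encrypts on the driver with the local key, while EncryptExec encrypts inside the workers' enclaves. A rough driver-only usage sketch of the new API follows; it assumes the driver's encryption key has already been initialized during Opaque's normal startup, and the schema and rows are made up for illustration.

```scala
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.types.{IntegerType, StringType}
import org.apache.spark.unsafe.types.UTF8String
import edu.berkeley.cs.rise.opaque.Utils

// Illustrative only: assumes the driver's encryption key is already set up.
val rows = Seq(
  InternalRow(1, UTF8String.fromString("foo")),
  InternalRow(2, UTF8String.fromString("bar")))

// Driver side: encrypt with the local key instead of an enclave.
val block = Utils.encryptInternalRowsFlatbuffers(
  rows, Seq(IntegerType, StringType), useEnclave = false)

// Decryption is driver-only, so it always uses the local key.
val decrypted = Utils.decryptBlockFlatbuffers(block)
assert(decrypted.map(_.getInt(0)) == Seq(1, 2))
```
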
