Skip to content

Commit

Permalink
ORC-1305: Add `CompressionWriter` and `InMemoryEncryption[Reader|Writer]` examples
Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?
This PR aims to add more Java examples.

### Why are the changes needed?
We need more examples covering **InMemoryEncryption** and **Compression**.

### How was this patch tested?
UT

Closes #1302 from deshanxiao/deshan/add-more-example.

Authored-by: deshanxiao <deshanxiao@microsoft.com>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
  • Loading branch information
deshanxiao authored and dongjoon-hyun committed Nov 7, 2022
1 parent 9cf9d26 commit 31acaaa
Show file tree
Hide file tree
Showing 5 changed files with 219 additions and 0 deletions.
4 changes: 4 additions & 0 deletions java/examples/pom.xml
Expand Up @@ -36,6 +36,10 @@
<groupId>org.apache.orc</groupId>
<artifactId>orc-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.orc</groupId>
<artifactId>orc-shims</artifactId>
</dependency>

<!-- inter-project -->
<dependency>
Expand Down
@@ -0,0 +1,66 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.orc.examples;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcFile.WriterOptions;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static org.apache.orc.CompressionKind.SNAPPY;

public class CompressionWriter {
  /**
   * Writes 10,000 rows of {@code struct<x:int,y:string>} to the file
   * "compressed.orc" using SNAPPY compression.
   *
   * @param conf Hadoop configuration used to resolve the output file system
   * @param args command-line arguments (unused)
   * @throws IOException if the file cannot be created or written
   */
  public static void main(Configuration conf, String[] args) throws IOException {
    TypeDescription schema = TypeDescription.fromString("struct<x:int,y:string>");
    // Set the compression kind to SNAPPY. Other supported compression
    // kinds include ZLIB, LZO, LZ4, and ZSTD.
    WriterOptions options = OrcFile.writerOptions(conf).setSchema(schema).compress(SNAPPY);
    Writer snappyWriter = OrcFile.createWriter(new Path("compressed.orc"), options);

    VectorizedRowBatch batch = schema.createRowBatch();
    LongColumnVector x = (LongColumnVector) batch.cols[0];
    BytesColumnVector y = (BytesColumnVector) batch.cols[1];
    for (int r = 0; r < 10000; ++r) {
      int row = batch.size++;
      x.vector[row] = r;
      // Use an explicit charset: the no-arg getBytes() depends on the
      // platform default and would make the file contents non-portable.
      byte[] buffer = ("byte-" + r).getBytes(StandardCharsets.UTF_8);
      y.setRef(row, buffer, 0, buffer.length);
      // If the batch is full, write it out and start over.
      if (batch.size == batch.getMaxSize()) {
        snappyWriter.addRowBatch(batch);
        batch.reset();
      }
    }
    // Flush any remaining partially-filled batch before closing.
    if (batch.size != 0) {
      snappyWriter.addRowBatch(batch);
    }
    snappyWriter.close();
  }

  public static void main(String[] args) throws IOException {
    main(new Configuration(), args);
  }
}
9 changes: 9 additions & 0 deletions java/examples/src/java/org/apache/orc/examples/Driver.java
Expand Up @@ -75,6 +75,9 @@ public static void main(String[] args) throws Exception {
System.err.println(" read - read a sample ORC file");
System.err.println(" write2 - write a sample ORC file with a map");
System.err.println(" read2 - read a sample ORC file with a map");
System.err.println(" compressWriter - write a ORC file with snappy compression");
System.err.println(" inMemoryEncryptionWriter - write a ORC file with encryption");
System.err.println(" inMemoryEncryptionReader - read a ORC file with encryption");
System.err.println();
System.err.println("To get more help, provide -h to the command");
System.exit(1);
Expand All @@ -95,6 +98,12 @@ public static void main(String[] args) throws Exception {
AdvancedWriter.main(conf, options.commandArgs);
} else if ("read2".equals(options.command)) {
AdvancedReader.main(conf, options.commandArgs);
} else if ("compressWriter".equals(options.command)) {
CompressionWriter.main(conf, options.commandArgs);
} else if ("inMemoryEncryptionWriter".equals(options.command)) {
InMemoryEncryptionWriter.main(conf, options.commandArgs);
} else if ("inMemoryEncryptionReader".equals(options.command)) {
InMemoryEncryptionReader.main(conf, options.commandArgs);
} else {
System.err.println("Unknown subcommand: " + options.command);
System.exit(1);
Expand Down
@@ -0,0 +1,68 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.orc.examples;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.InMemoryKeystore;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcFile.ReaderOptions;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;
import org.apache.orc.TypeDescription;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static org.apache.orc.EncryptionAlgorithm.AES_CTR_128;

public class InMemoryEncryptionReader {
  /**
   * Reads the column-encrypted file "encrypted.orc" (as written by
   * {@code InMemoryEncryptionWriter}) and prints every row to stdout.
   *
   * @param conf Hadoop configuration used to resolve the input file system
   * @param args command-line arguments (unused)
   * @throws IOException if the file cannot be opened or read
   */
  public static void main(Configuration conf, String[] args) throws IOException {
    byte[] kmsKey = "secret123".getBytes(StandardCharsets.UTF_8);
    // The InMemoryKeystore supplies the key material needed to decrypt the
    // data; it must contain the same "pii" key the writer used.
    InMemoryKeystore keyProvider = new InMemoryKeystore().addKey("pii", AES_CTR_128, kmsKey);
    ReaderOptions readerOptions = OrcFile.readerOptions(conf).setKeyProvider(keyProvider);
    Reader reader = OrcFile.createReader(new Path("encrypted.orc"), readerOptions);

    System.out.println("File schema: " + reader.getSchema());
    System.out.println("Row count: " + reader.getNumberOfRows());

    // Pick the schema we want to read using schema evolution.
    TypeDescription schema = TypeDescription.fromString("struct<x:int,y:string>");
    VectorizedRowBatch batch = schema.createRowBatch();
    // try-with-resources guarantees the record reader is closed even if an
    // exception is thrown while iterating (the original leaked it on error).
    try (RecordReader rowIterator = reader.rows(reader.options().schema(schema))) {
      LongColumnVector x = (LongColumnVector) batch.cols[0];
      BytesColumnVector y = (BytesColumnVector) batch.cols[1];
      while (rowIterator.nextBatch(batch)) {
        for (int row = 0; row < batch.size; ++row) {
          System.out.println("x: " + x.vector[row]);
          System.out.println("y: " + y.toString(row));
        }
      }
    }
  }

  public static void main(String[] args) throws IOException {
    main(new Configuration(), args);
  }
}
@@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.orc.examples;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.InMemoryKeystore;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcFile.WriterOptions;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static org.apache.orc.EncryptionAlgorithm.AES_CTR_128;

public class InMemoryEncryptionWriter {
  /**
   * Writes 10,000 rows of {@code struct<x:int,y:string>} to the file
   * "encrypted.orc", encrypting columns x and y with an AES_CTR_128 key
   * named "pii" held in an in-memory keystore.
   *
   * @param conf Hadoop configuration used to resolve the output file system
   * @param args command-line arguments (unused)
   * @throws IOException if the file cannot be created or written
   */
  public static void main(Configuration conf, String[] args) throws IOException {
    TypeDescription schema = TypeDescription.fromString("struct<x:int,y:string>");
    byte[] kmsKey = "secret123".getBytes(StandardCharsets.UTF_8);
    // The primary use of InMemoryKeystore is for users who don't have a
    // Hadoop KMS available.
    InMemoryKeystore provider = new InMemoryKeystore().addKey("pii", AES_CTR_128, kmsKey);
    // Encrypt columns x and y with the key named "pii".
    String encryption = "pii:x,y";
    WriterOptions writerOptions =
        OrcFile.writerOptions(conf).setSchema(schema).setKeyProvider(provider).encrypt(encryption);
    Writer writer = OrcFile.createWriter(new Path("encrypted.orc"), writerOptions);

    VectorizedRowBatch batch = schema.createRowBatch();
    LongColumnVector x = (LongColumnVector) batch.cols[0];
    BytesColumnVector y = (BytesColumnVector) batch.cols[1];
    for (int r = 0; r < 10000; ++r) {
      int row = batch.size++;
      x.vector[row] = r;
      // Use an explicit charset (consistent with kmsKey above): the no-arg
      // getBytes() depends on the platform default.
      byte[] buffer = ("byte-" + r).getBytes(StandardCharsets.UTF_8);
      y.setRef(row, buffer, 0, buffer.length);
      // If the batch is full, write it out and start over.
      if (batch.size == batch.getMaxSize()) {
        writer.addRowBatch(batch);
        batch.reset();
      }
    }
    // Flush any remaining partially-filled batch before closing.
    if (batch.size != 0) {
      writer.addRowBatch(batch);
    }
    writer.close();
  }

  public static void main(String[] args) throws IOException {
    main(new Configuration(), args);
  }
}

0 comments on commit 31acaaa

Please sign in to comment.