
merging from trunk

Bill Graham committed Aug 9, 2012
2 parents 60bc756 + 3748c0e commit 383ca14613b462eb81381d6f0d5ad91810bd1ede
Showing with 2,540 additions and 869 deletions.
  1. +38 −0 CHANGES.txt
  2. +3 −0 build.xml
  3. +5 −0 conf/pig.properties
  4. +3 −3 contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/avro/AvroSchema2Pig.java
  5. +1 −1 contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/avro/AvroStorageUtils.java
  6. +4 −4 contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/avro/PigAvroDatumWriter.java
  7. +54 −3 contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/avro/TestAvroStorage.java
  8. +2 −3 ...piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/avro/TestAvroStorageUtils.java
  9. BIN ...st/java/org/apache/pig/piggybank/test/storage/avro/avro_test_files/test_generic_union_schema.avro
  10. BIN ...c/test/java/org/apache/pig/piggybank/test/storage/avro/avro_test_files/test_recursive_schema.avro
  11. +2 −2 src/docs/src/documentation/content/xdocs/basic.xml
  12. +51 −7 src/docs/src/documentation/content/xdocs/cont.xml
  13. +5 −0 src/docs/src/documentation/content/xdocs/perf.xml
  14. +6 −1 src/docs/src/documentation/content/xdocs/pig-index.xml
  15. +4 −3 src/docs/src/documentation/content/xdocs/start.xml
  16. +170 −2 src/docs/src/documentation/content/xdocs/udf.xml
  17. +31 −0 src/org/apache/pig/PigConfiguration.java
  18. +11 −5 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/InputSizeReducerEstimator.java
  19. +66 −25 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
  20. +7 −26 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java
  21. +6 −0 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceOper.java
  22. +1 −1 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java
  23. +1 −1 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java
  24. +21 −0 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigRecordReader.java
  25. +1 −1 src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigReducerEstimator.java
  26. +30 −3 src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POUserFunc.java
  27. +417 −78 src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/util/PlanHelper.java
  28. +5 −7 src/org/apache/pig/builtin/CubeDimensions.java
  29. +91 −0 src/org/apache/pig/builtin/RollupDimensions.java
  30. +6 −1 src/org/apache/pig/data/AbstractTuple.java
  31. +0 −12 src/org/apache/pig/data/DefaultTuple.java
  32. +76 −10 src/org/apache/pig/data/SchemaTuple.java
  33. +21 −9 src/org/apache/pig/data/SchemaTupleClassGenerator.java
  34. +0 −4 src/org/apache/pig/data/SchemaTupleFactory.java
  35. +53 −16 src/org/apache/pig/newplan/logical/relational/LOCube.java
  36. +16 −3 src/org/apache/pig/parser/AliasMasker.g
  37. +16 −4 src/org/apache/pig/parser/AstPrinter.g
  38. +17 −4 src/org/apache/pig/parser/AstValidator.g
  39. +228 −84 src/org/apache/pig/parser/LogicalPlanBuilder.java
  40. +53 −29 src/org/apache/pig/parser/LogicalPlanGenerator.g
  41. +3 −0 src/org/apache/pig/parser/QueryLexer.g
  42. +10 −3 src/org/apache/pig/parser/QueryParser.g
  43. +8 −3 src/org/apache/pig/parser/QueryParserDriver.java
  44. +5 −10 src/org/apache/pig/parser/QueryParserUtils.java
  45. +4 −4 src/org/apache/pig/pen/LocalMapReduceSimulator.java
  46. +3 −3 src/org/apache/pig/tools/pigstats/ScriptState.java
  47. +1 −0 test/e2e/pig/conf/local.conf
  48. +1 −1 test/e2e/pig/tests/negative.conf
  49. +1 −1 test/e2e/pig/tests/nightly.conf
  50. +2 −2 test/e2e/pig/tests/turing_jython.conf
  51. +29 −0 test/org/apache/pig/data/TestSchemaTuple.java
  52. +1 −1 test/org/apache/pig/parser/TestLexer.pig
  53. +3 −3 test/org/apache/pig/parser/TestLogicalPlanGenerator.java
  54. +2 −2 test/org/apache/pig/parser/TestParser.pig
  55. +1 −1 test/org/apache/pig/parser/TestQueryLexer.java
  56. +29 −18 test/org/apache/pig/parser/TestQueryParser.java
  57. +347 −284 test/org/apache/pig/test/TestCubeOperator.java
  58. +1 −34 test/org/apache/pig/test/TestInvoker.java
  59. +68 −0 test/org/apache/pig/test/TestInvokerSpeed.java
  60. +47 −25 test/org/apache/pig/test/TestJobSubmission.java
  61. +112 −122 test/org/apache/pig/test/TestMacroExpansion.java
  62. +266 −0 test/org/apache/pig/test/TestNumberOfReducers.java
  63. +57 −0 test/org/apache/pig/test/TestRollupDimensions.java
  64. +16 −0 test/org/apache/pig/test/Util.java
  65. +1 −0 test/unit-tests
@@ -24,6 +24,28 @@ INCOMPATIBLE CHANGES
IMPROVEMENTS
+PIG-2862: Hardcode certain tuple lengths into the TUPLE BinInterSedes byte identifier (jcoveney)
+
+PIG-2855: Provide a method to measure time spent in UDFs (dvryaboy)
+
+PIG-2837: AvroStorage throws StackOverFlowError (cheolsoo via sms)
+
+PIG-2856: AvroStorage doesn't load files in the directories when a glob pattern matches both files and directories. (cheolsoo via sms)
+
+PIG-2569: Fix org.apache.pig.test.TestInvoker.testSpeed (aklochkov via dvryaboy)
+
+PIG-2858: Improve PlanHelper to allow finding any PhysicalOperator in a plan (dvryaboy)
+
+PIG-2854: AvroStorage doesn't work with Avro 1.7.1 (cheolsoo via sms)
+
+PIG-2779: Refactoring the code for setting number of reducers (jay23jack via billgraham)
+
+PIG-2765: Implementing RollupDimensions UDF and adding ROLLUP clause in CUBE operator (prasanth_j via dvryaboy)
+
+PIG-2814: Fix issues with Sample operator documentation (prasanth_j via dvryaboy)
+
+PIG-2817: Documentation for Groovy UDFs (herberts via julien)
+
PIG-2492: AvroStorage should recognize globs and commas (cheolsoo via sms)
PIG-2706: Add clear to list of grunt commands (xalan via azaroth)
@@ -208,6 +230,18 @@ OPTIMIZATIONS
BUG FIXES
+PIG-2860: [piggybank] TestAvroStorageUtils.testGetConcretePathFromGlob fails on some version of hadoop (cheolsoo via jcoveney)
+
+PIG-2861: PlanHelper imports org.python.google.common.collect.Lists instead of org.google.common.collect.Lists (jcoveney)
+
+PIG-2849: Errors in document Getting Started (miyakawataku via billgraham)
+
+PIG-2843: Typo in Documentation (eric59 via billgraham)
+
+PIG-2841: Inconsistent URL in Docs (eric59 via billgraham)
+
+PIG-2740: get rid of "java[77427:1a03] Unable to load realm info from SCDynamicStore" log lines when running pig tests (julien)
+
PIG-2839: mock.Storage overwrites output with the last relation written when storing UNION (julien)
PIG-2840: Fix SchemaTuple bugs (jcoveney)
@@ -459,6 +493,10 @@ PIG-2228: support partial aggregation in map task (thejas)
BUG FIXES
+PIG-2859: Fix few e2e test failures (rohini via daijy)
+
+PIG-2729: Macro expansion does not use pig.import.search.path - UnitTest borked (johannesch via daijy)
+
PIG-2783: Fix Iterator_1 e2e test for Hadoop 23 (rohini via daijy)
PIG-2761: With hadoop23 importing modules inside python script does not work (rohini via daijy)
@@ -825,6 +825,8 @@
<sysproperty key="hod.param" value="${hod.param}"/> -->
<sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
<jvmarg line="${debugArgs}"/>
+ <sysproperty key="java.security.krb5.realm" value="" />
+ <sysproperty key="java.security.krb5.kdc" value="" />
<classpath>
<pathelement location="${output.jarfile.withouthadoop}" />
<pathelement location="${test.build.classes}" />
@@ -843,6 +845,7 @@
<excludesfile name="${test.exclude.file.23}" if="isHadoop23" />
</patternset>
<exclude name="**/${exclude.testcase}.java" if="exclude.testcase" />
+ <exclude name="**/TestInvokerSpeed.java" if="clover.enabled"/>
</fileset>
</batchtest>
<batchtest fork="yes" todir="${test.log.dir}" if="testcase">
@@ -59,6 +59,11 @@
#using more counters than hadoop configured limit
#pig.disable.counter=true
+# Use this option to turn on UDF timers. This will cause two
+# counters to be tracked for every UDF and LoadFunc in your script:
+# approx_microsecs measures approximate time spent inside a UDF
+# approx_invocations reports the approximate number of times the UDF was invoked
+# pig.udf.profile=false
#When enabled, 'describe' prints a multi-line formatted schema
#(similar to an indended json) rather than on a single line.
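
For reference, a minimal sketch of enabling the UDF timers described above from embedded Java; only the property name comes from this change, while the script, aliases, and paths are hypothetical:

import java.util.Properties;

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;

public class UdfProfileSketch {
    public static void main(String[] args) throws Exception {
        // Turn on the approx_microsecs / approx_invocations counters for
        // every UDF and LoadFunc used by the registered queries.
        Properties props = new Properties();
        props.setProperty("pig.udf.profile", "true");

        PigServer pig = new PigServer(ExecType.LOCAL, props);
        pig.registerQuery("A = LOAD 'input.txt' AS (line:chararray);");
        pig.registerQuery("B = FOREACH A GENERATE UPPER(line);");
        pig.store("B", "profiled_output");
    }
}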
@@ -47,12 +47,12 @@ public static ResourceFieldSchema getPigSchema(byte pigType, String fieldName) {
*/
public static ResourceSchema convert(Schema schema) throws IOException {
- if (AvroStorageUtils.containsGenericUnion(schema))
- throw new IOException ("We don't accept schema containing generic unions.");
-
if (AvroStorageUtils.containsRecursiveRecord(schema))
throw new IOException ("We don't accept schema containing recursive records.");
+ if (AvroStorageUtils.containsGenericUnion(schema))
+ throw new IOException ("We don't accept schema containing generic unions.");
+
ResourceFieldSchema inSchema = inconvert(schema, FIELD);
ResourceSchema tupleSchema;
@@ -124,7 +124,7 @@ static boolean getAllSubDirs(Path path, Job job, Set<Path> paths) throws IOExcep
}
for (FileStatus file : matchedFiles) {
if (file.isDir()) {
- for (FileStatus sub : fs.listStatus(path)) {
+ for (FileStatus sub : fs.listStatus(file.getPath())) {
getAllSubDirs(sub.getPath(), job, paths);
}
} else {
@@ -108,15 +108,15 @@ protected void write(Schema schema, Object datum, Encoder out)
*/
protected void writeUnion(Schema schema, Object datum, Encoder out)
throws IOException {
- int index = resolveUnion(schema, datum);
+ int index = resolveUnionSchema(schema, datum);
out.writeIndex(index);
write(schema.getTypes().get(index), datum, out);
}
/**
* Called to resolve union.
*/
- protected int resolveUnion(Schema union, Object datum) throws IOException {
+ protected int resolveUnionSchema(Schema union, Object datum) throws IOException {
int i = 0;
for (Schema type : union.getTypes()) {
if (type.getType().equals(Schema.Type.UNION))
@@ -130,7 +130,7 @@ protected int resolveUnion(Schema union, Object datum) throws IOException {
/**
* Recursively check whether "datum" is an instance of "schema" and called
- * by {@link #resolveUnion(Schema,Object)},
+ * by {@link #resolveUnionSchema(Schema,Object)},
* {@link #unwrappedInstanceOf(Schema,Object)}.
*
*/
@@ -156,7 +156,7 @@ protected boolean instanceOf(Schema schema, Object datum)
case UNION:
@SuppressWarnings("unused")
- int index = resolveUnion(schema, datum);
+ int index = resolveUnionSchema(schema, datum);
return true;
case ENUM:
return datum instanceof String && schema.hasEnumSymbol(((String) datum))
@@ -29,7 +29,10 @@
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobCreationException;
+import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.piggybank.storage.avro.AvroStorage;
import org.apache.pig.piggybank.storage.avro.PigSchema2Avro;
import org.apache.pig.test.Util;
@@ -41,6 +44,7 @@
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
+import java.util.List;
import java.util.Properties;
import java.util.Set;
@@ -78,6 +82,8 @@ private static String getInputFile(String file) {
final private String testArrayFile = getInputFile("test_array.avro");
final private String testRecordFile = getInputFile("test_record.avro");
final private String testRecordSchema = getInputFile("test_record.avsc");
+ final private String testRecursiveSchemaFile = getInputFile("test_recursive_schema.avro");
+ final private String testGenericUnionSchemaFile = getInputFile("test_generic_union_schema.avro");
final private String testTextFile = getInputFile("test_record.txt");
final private String testSingleTupleBagFile = getInputFile("messages.avro");
final private String testNoExtensionFile = getInputFile("test_no_extension");
@@ -93,6 +99,48 @@ public static void teardown() {
if(pigServerLocal != null) pigServerLocal.shutdown();
}
+ @Test
+ public void testRecursiveSchema() throws IOException {
+ // Verify that a FrontendException is thrown if schema is recursive.
+ String output= outbasedir + "testRecursiveSchema";
+ deleteDirectory(new File(output));
+ String [] queries = {
+ " in = LOAD '" + testRecursiveSchemaFile +
+ "' USING org.apache.pig.piggybank.storage.avro.AvroStorage ();",
+ " STORE in INTO '" + output +
+ "' USING org.apache.pig.piggybank.storage.avro.AvroStorage ();"
+ };
+ try {
+ testAvroStorage(queries);
+ Assert.fail();
+ } catch (FrontendException e) {
+ // The IOException thrown by AvroStorage for recursive schema is caught
+ // by the Pig frontend, and FrontendException is re-thrown.
+ assertTrue(e.getMessage().contains("Cannot get schema"));
+ }
+ }
+
+ @Test
+ public void testGenericUnionSchema() throws IOException {
+ // Verify that a FrontendException is thrown if schema has generic union.
+ String output= outbasedir + "testGenericUnionSchema";
+ deleteDirectory(new File(output));
+ String [] queries = {
+ " in = LOAD '" + testGenericUnionSchemaFile +
+ "' USING org.apache.pig.piggybank.storage.avro.AvroStorage ();",
+ " STORE in INTO '" + output +
+ "' USING org.apache.pig.piggybank.storage.avro.AvroStorage ();"
+ };
+ try {
+ testAvroStorage(queries);
+ Assert.fail();
+ } catch (FrontendException e) {
+ // The IOException thrown by AvroStorage for generic union is caught
+ // by the Pig frontend, and FrontendException is re-thrown.
+ assertTrue(e.getMessage().contains("Cannot get schema"));
+ }
+ }
+
@Test
public void testDir() throws IOException {
// Verify that all files in a directory including its sub-directories are loaded.
@@ -112,7 +160,7 @@ public void testDir() throws IOException {
public void testGlob1() throws IOException {
// Verify that the a glob pattern matches files properly.
String output = outbasedir + "testGlob1";
- String expected = basedir + "expected_test_dir_1.avro";
+ String expected = basedir + "expected_testDir.avro";
deleteDirectory(new File(output));
String [] queries = {
" in = LOAD '" + testDir1AllFiles + "' USING org.apache.pig.piggybank.storage.avro.AvroStorage ();",
@@ -492,11 +540,14 @@ private void testAvroStorage(String ...queries) throws IOException {
if (query != null && query.length() > 0)
pigServerLocal.registerQuery(query);
}
- pigServerLocal.executeBatch();
+ List<ExecJob> jobs = pigServerLocal.executeBatch();
+ for (ExecJob job : jobs) {
+ assertEquals(JOB_STATUS.COMPLETED, job.getStatus());
+ }
}
private void verifyResults(String outPath, String expectedOutpath) throws IOException {
- verifyResults(outPath, expectedOutpath, null);
+ verifyResults(outPath, expectedOutpath, null);
}
private void verifyResults(String outPath, String expectedOutpath, String expectedCodec) throws IOException {
@@ -128,8 +128,7 @@ public void testGenericUnion() throws IOException {
@Test
public void testGetConcretePathFromGlob() throws IOException {
- final String defaultscheme = "file:";
- final String basedir = System.getProperty("user.dir");
+ final String basedir = "file://" + System.getProperty("user.dir");
final String tempdir = Long.toString(System.currentTimeMillis());
final String nonexistentpath = basedir + "/" + tempdir + "/this_path_does_not_exist";
@@ -140,7 +139,7 @@ public void testGetConcretePathFromGlob() throws IOException {
// existent path
String locationStr = basedir;
concretePath = AvroStorageUtils.getConcretePathFromGlob(locationStr, job);
- assertEquals(defaultscheme + basedir, concretePath.toString());
+ assertEquals(basedir, concretePath.toUri().toString());
// non-existent path
locationStr = nonexistentpath;
@@ -6922,7 +6922,7 @@ DUMP X;
<!-- =========================================================================== -->
<section id="sample">
<title>SAMPLE</title>
- <p>Partitions a relation into two or more relations.</p>
+ <p>Selects a random sample of data based on the specified sample size.</p>
<section>
<title>Syntax</title>
@@ -6952,7 +6952,7 @@ DUMP X;
<td>
<p>Sample size, either</p>
<ul>
- <li>a constant, rage 0 to 1 (for example, enter 0.1 for 10%)</li>
+ <li>a constant, range 0 to 1 (for example, enter 0.1 for 10%)</li>
<li>a scalar used in an expression</li>
</ul>
<p></p>
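
A short sketch of exercising the SAMPLE operator documented above from embedded Java; the input file and schema are hypothetical, and SAMPLE keeps roughly, not exactly, the requested fraction of tuples:

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;

public class SampleSketch {
    public static void main(String[] args) throws Exception {
        PigServer pig = new PigServer(ExecType.LOCAL);
        pig.registerQuery("A = LOAD 'data.txt' AS (f1:int, f2:chararray);");
        // Keep roughly 10% of A's tuples; the sample size is approximate.
        pig.registerQuery("X = SAMPLE A 0.1;");
        pig.store("X", "sample_out");
    }
}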
@@ -24,17 +24,17 @@
<!-- ============================================ -->
<section id="embed-python">
-<title>Embedded Pig - Python and JavaScript </title>
+<title>Embedded Pig - Python, JavaScript and Groovy</title>
-<p>To enable control flow, you can embed Pig Latin statements and Pig commands in the Python and JavaScript scripting languages using a JDBC-like compile, bind, run model. For Python, make sure the Jython jar is included in your class path. For JavaScript, make sure the Rhino jar is included in your classpath.</p>
+<p>To enable control flow, you can embed Pig Latin statements and Pig commands in the Python, JavaScript and Groovy scripting languages using a JDBC-like compile, bind, run model. For Python, make sure the Jython jar is included in your class path. For JavaScript, make sure the Rhino jar is included in your classpath. For Groovy, make sure the groovy-all jar is included in your classpath.</p>
-<p>Note that host languages and the languages of UDFs (included as part of the embedded Pig) are completely orthogonal. For example, a Pig Latin statement that registers a Python UDF may be embedded in Python, JavaScript, or Java. The exception to this rule is "combined" scripts – here the languages must match (see the <a href="udf.html#python-advanced">Advanced Topics for Python</a> and <a href="udf.html#js-advanced">Advanced Topics for JavaScript</a>). </p>
+<p>Note that host languages and the languages of UDFs (included as part of the embedded Pig) are completely orthogonal. For example, a Pig Latin statement that registers a Python UDF may be embedded in Python, JavaScript, or Java. The exception to this rule is "combined" scripts – here the languages must match (see the <a href="udf.html#python-advanced">Advanced Topics for Python</a>, <a href="udf.html#js-advanced">Advanced Topics for JavaScript</a> and <a href="udf.html#groovy-advanced">Advanced Topics for Groovy</a>). </p>
<!-- ============================================== -->
<section id="invocation-basics">
<title>Invocation Basics</title>
-<p>Embedded Pig is supported in batch mode only, not interactive mode. You can request that embedded Pig be used by adding the <code>--embedded</code> option to the Pig command line. If this option is passed as an argument, that argument will refer to the language Pig is embedded in, either Python or JavaScript. If no argument is specified, it is taken to refer to the reference implementation for Python.</p>
+<p>Embedded Pig is supported in batch mode only, not interactive mode. You can request that embedded Pig be used by adding the <code>--embedded</code> option to the Pig command line. If this option is passed as an argument, that argument will refer to the language Pig is embedded in, either Python, JavaScript or Groovy. If no argument is specified, it is taken to refer to the reference implementation for Python.</p>
<p><strong>Python</strong></p>
<source>
@@ -95,6 +95,33 @@ function main() {
}
</source>
+<p><strong>Groovy</strong></p>
+<source>
+$ pig myembedded.groovy
+OR
+$ java -cp &lt;groovy-all jar&gt;:&lt;pig jars&gt;; [--embedded groovy] /tmp/myembedded.groovy
+</source>
+<p></p>
+<p>Pig will look for the *.groovy extension in the script.</p>
+<source>
+import org.apache.pig.scripting.Pig;
+
+public static void main(String[] args) {
+ String input = "original"
+ String output = "output"
+
+ Pig P = Pig.compile("A = load '\$in'; store A into '\$out';")
+
+ result = P.bind(['in':input, 'out':output]).runSingle()
+
+ if (result.isSuccessful()) {
+ print("Pig job succeeded")
+ } else {
+ print("Pig job failed")
+ }
+}
+</source>
+
<p><strong>Invocation Process</strong></p>
<p>You invoke Pig in the host scripting language through an embedded <a href="#pig-Object">Pig object</a>. </p>
@@ -284,7 +311,7 @@ result = Q.runSingle()
<p>
2. Command line arguments
-Currently this feature is only available in Python. You can pass command line arguments (the arguments after the script file name) to Python. These will become sys.argv in Python. For example: pig script.py student.txt. The corresponding script is:
+Currently this feature is only available in Python and Groovy. You can pass command line arguments (the arguments after the script file name) to Python. These will become sys.argv in Python and will be passed as main's arguments in Groovy. For example: pig script.py student.txt. The corresponding script is:
</p>
<source>
#!/usr/bin/python
@@ -297,6 +324,23 @@ Q = P.bind()
result = Q.runSingle()
</source>
+
+<p>and in Groovy, pig script.groovy student.txt:</p>
+
+<source>
+import org.apache.pig.scripting.Pig;
+
+public static void main(String[] args) {
+
+ P = Pig.compile("A = load '" + args[1] + "' as (name, age, gpa);" +
+ "store A into 'output';");
+
+ Q = P.bind()
+
+ result = Q.runSingle()
+}
+</source>
+
</section>
</section>
@@ -782,7 +826,7 @@ public abstract class PigStats {
<p>To enable control flow, you can embed Pig Latin statements and Pig commands in the Java programming language. </p>
-<p>Note that host languages and the languages of UDFs (included as part of the embedded Pig) are completely orthogonal. For example, a Pig Latin statement that registers a Java UDF may be embedded in Python, JavaScript, or Java. The exception to this rule is "combined" scripts – here the languages must match (see the <a href="udf.html#python-advanced">Advanced Topics for Python</a> and <a href="udf.html#js-advanced">Advanced Topics for JavaScript</a>). </p>
+<p>Note that host languages and the languages of UDFs (included as part of the embedded Pig) are completely orthogonal. For example, a Pig Latin statement that registers a Java UDF may be embedded in Python, JavaScript, Groovy, or Java. The exception to this rule is "combined" scripts – here the languages must match (see the <a href="udf.html#python-advanced">Advanced Topics for Python</a>, <a href="udf.html#js-advanced">Advanced Topics for JavaScript</a> and <a href="udf.html#groovy-advanced">Advanced Topics for Groovy</a>). </p>
<section id="pigserver">
<title>PigServer Interface</title>
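
As a rough sketch of the PigServer-style embedding this section refers to (input file and schema are hypothetical), using the same executeBatch()/ExecJob status check that the TestAvroStorage change earlier in this diff relies on:

import java.util.List;

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecJob;
import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;

public class PigServerBatchSketch {
    public static void main(String[] args) throws Exception {
        PigServer pig = new PigServer(ExecType.LOCAL);
        pig.setBatchOn();
        pig.registerQuery("A = LOAD 'student.txt' AS (name:chararray, age:int, gpa:double);");
        pig.registerQuery("STORE A INTO 'students_out';");
        // executeBatch() returns the launched jobs so their status can be verified.
        List<ExecJob> jobs = pig.executeBatch();
        for (ExecJob job : jobs) {
            if (job.getStatus() != JOB_STATUS.COMPLETED) {
                System.err.println("Pig job did not complete: " + job.getStatus());
            }
        }
    }
}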
@@ -1175,7 +1219,7 @@ IMPORT 'my_macro.pig';
<!-- =========================================== -->
- <section id="parameter-sub">
+ <section id="Parameter-Sub">
<title>Parameter Substitution</title>
<section>