Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Initial Import

  • Loading branch information...
commit 790a1aa146313fa58ce50f9ca3db801fcf467d0a 0 parents
@johnynek johnynek authored caniszczyk committed
Showing with 6,293 additions and 0 deletions.
  1. +9 −0 .gitignore
  2. +177 −0 LICENSE
  3. +13 −0 NOTICE
  4. +67 −0 README.md
  5. +8 −0 project/build.properties
  6. +25 −0 project/build/Project.scala
  7. +13 −0 project/plugins/Plugins.scala
  8. +206 −0 scripts/scald.rb
  9. +56 −0 scripts/scalding_gen.rb
  10. +99 −0 src/main/scala/com/twitter/scalding/Args.scala
  11. +348 −0 src/main/scala/com/twitter/scalding/DateRange.scala
  12. +128 −0 src/main/scala/com/twitter/scalding/FieldConversions.scala
  13. +1,112 −0 src/main/scala/com/twitter/scalding/GeneratedConversions.scala
  14. +448 −0 src/main/scala/com/twitter/scalding/GroupBuilder.scala
  15. +95 −0 src/main/scala/com/twitter/scalding/Job.scala
  16. +313 −0 src/main/scala/com/twitter/scalding/KryoHadoopSerialization.scala
  17. +56 −0 src/main/scala/com/twitter/scalding/MemoryTap.scala
  18. +76 −0 src/main/scala/com/twitter/scalding/Mode.scala
  19. +285 −0 src/main/scala/com/twitter/scalding/Operations.scala
  20. +312 −0 src/main/scala/com/twitter/scalding/RichPipe.scala
  21. +298 −0 src/main/scala/com/twitter/scalding/Source.scala
  22. +96 −0 src/main/scala/com/twitter/scalding/Tool.scala
  23. +89 −0 src/main/scala/com/twitter/scalding/TupleBase.scala
  24. +105 −0 src/main/scala/com/twitter/scalding/TupleConversions.scala
  25. +50 −0 src/main/scala/com/twitter/scalding/examples/MergeTest.scala
  26. +179 −0 src/main/scala/com/twitter/scalding/examples/PageRank.scala
  27. +10 −0 src/main/scala/com/twitter/scalding/examples/WordCountJob.scala
  28. +56 −0 src/test/scala/com/twitter/scalding/ArgTest.scala
  29. +382 −0 src/test/scala/com/twitter/scalding/CoreTest.scala
  30. +159 −0 src/test/scala/com/twitter/scalding/DateTest.scala
  31. +128 −0 src/test/scala/com/twitter/scalding/FieldImpsTest.scala
  32. +65 −0 src/test/scala/com/twitter/scalding/JobTest.scala
  33. +129 −0 src/test/scala/com/twitter/scalding/KryoTests.scala
  34. +38 −0 src/test/scala/com/twitter/scalding/PageRankTest.scala
  35. +20 −0 src/test/scala/com/twitter/scalding/WordCountTest.scala
  36. +223 −0 tutorial/CodeSnippets.md
  37. +75 −0 tutorial/Tutorial0.scala
  38. +53 −0 tutorial/Tutorial1.scala
  39. +64 −0 tutorial/Tutorial2.scala
  40. +70 −0 tutorial/Tutorial3.scala
  41. +69 −0 tutorial/Tutorial4.scala
  42. +87 −0 tutorial/Tutorial5.scala
  43. +2 −0  tutorial/data/hello.txt
9 .gitignore
@@ -0,0 +1,9 @@
+*.swp
+BUILD
+target/
+lib_managed/
+project/boot/
+project/build/target/
+project/plugins/target/
+project/plugins/lib_managed/
+project/plugins/src_managed/
177 LICENSE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
13 NOTICE
@@ -0,0 +1,13 @@
+scalding is a Scala API for Cascading.
+Copyright 2012 Twitter, Inc.
+
+Third Party Dependencies:
+
+Cascading 2.0
+Apache Public License 2.0
+http://www.cascading.org
+
+Hadoop 0.20.2
+Apache Public License 2.0
+http://hadoop.apache.org
+
67 README.md
@@ -0,0 +1,67 @@
+# Scalding
+Scalding is a library that has two components:
+
+* a scala DSL to make map-reduce computations look very similar to scala's collection API
+* a wrapper to Cascading to make it simpler to define the usual use cases of jobs, tests and describing new data on HDFS.
+
+To run scala scalding jobs, a script, scald.rb is provided in scripts/. Run this script
+with no arguments to see usage tips. You will need to customize the default variables
+at the head of that script for your environment.
+
+You should follow the scalding project on twitter: <http://twitter.com/scalding>
+
+## Word Count
+Hadoop is a distributed system for counting words. Here is how it's done in scalding. You can find this in examples:
+
+ package com.twitter.scalding.examples
+
+ import com.twitter.scalding._
+
+ class WordCountJob(args : Args) extends Job(args) {
+ TextLine( args("input") ).read.
+ flatMap('line -> 'word) { line : String => line.split("\\s+") }.
+ groupBy('word) { _.size }.
+ write( Tsv( args("output") ) )
+ }
+
+## Tutorial
+See tutorial/ for examples of how to use the DSL. See tutorial/CodeSnippets.md for some
+example scalding snippets.
+
+## Building
+0. Install sbt 0.7.4
+1. sbt update (takes 2 minutes or more)
+2. sbt test
+3. sbt package-dist
+
+use "sbt assembly" if you need to make a fat jar with all dependencies (recommended to work with
+scald.rb in scripts).
+
+## Mailing list
+
+Currently we are using the cascading-user mailing list for discussions.
+<http://groups.google.com/group/cascading-user>
+
+Follow the scalding project on twitter for updates: <http://twitter.com/scalding>
+
+## Bugs
+In the remote possibility that there exist bugs in this code, please report them to:
+<https://github.com/twitter/scalding/issues>
+
+## Authors:
+* Avi Bryant <http://twitter.com/avibryant>
+* Oscar Boykin <http://twitter.com/posco>
+* Argyris Zymnis <http://twitter.com/argyris>
+
+Thanks for assistance and contributions:
+
+* Chris Wensel <http://twitter.com/cwensel>
+* Ning Liang <http://twitter.com/ningliang>
+* Dmitriy Ryaboy <http://twitter.com/squarecog>
+* Dong Wang <http://twitter.com/dongwang218>
+* Edwin Chen <http://twitter.com/edchedch>
+
+## License
+Copyright 2012 Twitter, Inc.
+
+Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
8 project/build.properties
@@ -0,0 +1,8 @@
+#Project properties
+#Tue Dec 13 13:52:46 PST 2011
+project.organization=com.twitter
+project.name=scalding
+sbt.version=0.7.4
+project.version=0.2.0
+build.scala.versions=2.8.1
+project.initialize=false
25 project/build/Project.scala
@@ -0,0 +1,25 @@
+import sbt._
+import com.twitter.sbt._
+import reaktor.scct.ScctProject
+
+//sbt 0.7 build definition: mixes in dist packaging, IDEA project generation,
+//fat-jar assembly, and SCCT code-coverage support.
+class Project(info: ProjectInfo) extends StandardProject(info)
+  with PackageDist
+  with IdeaProject
+  with assembly.AssemblyBuilder
+  with ScctProject {
+
+  //Name of the zip produced by the package-dist task
+  override def distZipName = "%s.zip".format(name)
+
+  //Setup the cascading maven repo:
+  val conjars = "Concurrent Maven Repo" at "http://conjars.org/repo"
+  val c2core = "cascading" % "cascading-core" % "2.0.0-wip-165"
+  val c2local = "cascading" % "cascading-local" % "2.0.0-wip-165"
+  val c2hadoop = "cascading" % "cascading-hadoop" % "2.0.0-wip-165"
+
+  //Runtime dependencies
+  val hadoop = "org.apache.hadoop" % "hadoop-core" % "0.20.2"
+  val commons = "commons-lang" % "commons-lang" % "2.4"
+  val kryo = "de.javakaffee" % "kryo-serializers" % "0.9"
+
+  // Testing
+  val specs = "org.scala-tools.testing" % "specs_2.8.0" % "1.6.5" % "test"
+}
13 project/plugins/Plugins.scala
@@ -0,0 +1,13 @@
+import sbt._
+
+//sbt 0.7 plugin definitions; each repository is declared next to the plugin it hosts.
+class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
+  val twitterRepo = "com.twitter" at "http://maven.twttr.com"
+  val defaultProject = "com.twitter" % "standard-project" % "0.9.25"
+  val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
+  val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.3.0"
+  val scctRepo = "scct-repo" at "http://mtkopone.github.com/scct/maven-repo/"
+  //NOTE(review): lazy val defers evaluation until first access
+  lazy val scctPlugin = "reaktor" % "sbt-scct-for-2.8" % "0.1-SNAPSHOT"
+
+  val codaRepo = "Coda Hale's Repository" at "http://repo.codahale.com/"
+  val assemblySBT = "com.codahale" % "assembly-sbt" % "0.1"
+}
206 scripts/scald.rb
@@ -0,0 +1,206 @@
+#!/usr/bin/env ruby
+require 'fileutils'
+require 'thread'
+
+#Usage : scald.rb [--hdfs|--local|--print] job <job args>
+# --hdfs: if job ends in ".scala" or ".java" and the file exists, link it against JARFILE (below) and then run it on HOST.
+# else, it is assumed to be a full classname to an item in the JARFILE, which is run on HOST
+# --local: run locally according to the rules above
+# --print: print the command YOU SHOULD ENTER on the remote node. Useful for screen sessions.
+
+# Configuration in script
+##############################################################
+REDUCERS=100
+
+#Get the absolute path of the original (non-symlink) file.
+ORIGINAL_FILE=File.symlink?(__FILE__) ? File.readlink(__FILE__) : __FILE__
+SCIENCE_ROOT=File.expand_path(File.dirname(ORIGINAL_FILE)+"/../")
+JARFILE=SCIENCE_ROOT + "/target/scalding-assembly-0.2.0.jar" #what jar has all the dependencies for this job
+#NOTE(review): looks like leftover debug output — prints the jar path on every run
+puts JARFILE
+HOST="hadoopnest1.corp.twitter.com" #where the job is rsynced to and run
+TMPDIR="/tmp"
+BUILDDIR=TMPDIR+"/script-build"
+LOCALMEM="3g" #how much memory for java to use when running in local mode
+##############################################################
+
+if ARGV.size < 1
+  $stderr.puts("ERROR: insufficient args.")
+  #Make sure to print out up to Configuration above:
+  #NOTE(review): line numbers are hard-coded; keep in sync if the header changes
+  system("head -n 19 #{__FILE__} | tail -n+4")
+  exit(1)
+end
+
+#Consume a leading mode flag if present; default to --hdfs otherwise.
+#ARGV.shift returns the flag string it removed, so MODE becomes the flag itself.
+MODE = case ARGV[0]
+  when "--hdfs"
+    ARGV.shift
+  when "--local"
+    ARGV.shift
+  when "--print"
+    ARGV.shift
+  else
+    #default:
+    "--hdfs"
+end
+JOBFILE=ARGV.shift
+
+#Returns "scala" or "java" when JOBFILE has that extension, else nil ($1 from the regex).
+def file_type
+  JOBFILE =~ /\.(scala|java)$/
+  $1
+end
+
+#True when JOBFILE is a source file rather than a class name.
+def is_file?
+  !file_type.nil?
+end
+
+PACK_RE = /^package ([^;]+)/
+JOB_RE = /class\s+([^\s(]+).*extends\s+.*Job/
+EXTENSION_RE = /(.*)\.(scala|java)$/
+
+#Get the name of the job from the file.
+#the rule is: last class in the file, or the one that matches the filename
+def get_job_name(file)
+  package = ""
+  job = nil
+  default = nil
+  if file =~ EXTENSION_RE
+    default = $1
+    File.readlines(file).each { |s|
+      if s =~ PACK_RE
+        #chop drops the trailing newline that [^;]+ also captures
+        package = $1.chop + "."
+      elsif s =~ JOB_RE
+        unless job and default and (job.downcase == default.downcase)
+          #use either the last class, or the one with the same name as the file
+          job = $1
+        end
+      end
+    }
+    raise "Could not find job name" unless job
+    "#{package}#{job}"
+  else
+    #not a source file: assume the argument is already a fully qualified class name
+    file
+  end
+end
+
+JARPATH=File.expand_path(JARFILE)
+JARBASE=File.basename(JARFILE)
+JOBPATH=File.expand_path(JOBFILE)
+JOB=get_job_name(JOBFILE)
+JOBJAR=JOB+".jar"
+JOBJARPATH=TMPDIR+"/"+JOBJAR
+
+#These are all the threads we need to join before finishing
+THREADS = []
+
+#If any of the threads cannot finish their work, we add an error message here:
+FAILURES = []
+FAILURES_MTX = Mutex.new
+#Thread-safe append to FAILURES (called from the background rsync threads).
+def add_failure_msg(msg)
+  FAILURES_MTX.synchronize {
+    FAILURES << msg
+  }
+end
+
+#this is used to record the last time we rsynced
+#(a hidden ".<path>.touch" marker file in TMPDIR, slashes replaced by dots)
+def rsync_stat_file(filenm)
+  TMPDIR+"/"+"."+filenm.gsub(/\//,'.')+".touch"
+end
+
+#In another thread, rsync the file. If it succeeds, touch the rsync_stat_file
+def rsync(from, to)
+  rtouch = rsync_stat_file(from)
+  #skip the rsync entirely when the marker file is newer than the source (already synced)
+  if !File.exists?(rtouch) || File.stat(rtouch).mtime < File.stat(from).mtime
+    $stderr.puts("rsyncing #{to} in background...")
+    THREADS << Thread.new(from, to) { |ff,tt|
+      if system("rsync -e ssh -z #{ff} #{HOST}:#{tt}")
+        #this indicates success and notes the time
+        FileUtils.touch(rtouch)
+      else
+        #indicate failure
+        add_failure_msg("Could not rsync: #{ff} to #{HOST}:#{tt}")
+        FileUtils.rm_f(rtouch)
+      end
+    }
+  end
+end
+
+def is_local?
+  (MODE =~ /^--local/) != nil
+end
+
+#Rebuild when the job jar is missing or older than the job source file.
+def needs_rebuild?
+  !File.exists?(JOBJARPATH) || File.stat(JOBJARPATH).mtime < File.stat(JOBPATH).mtime
+end
+
+#Compile JOBFILE against the assembly jar and package the classes into JOBJARPATH.
+#Exits the script on compile failure; cleans BUILDDIR in either case.
+def build_job_jar
+  $stderr.puts("compiling " + JOBFILE)
+  FileUtils.mkdir_p(BUILDDIR)
+  #file_type is "scala" or "java", so this invokes scalac or javac
+  unless system("#{file_type}c -classpath #{JARPATH} -d #{BUILDDIR} #{JOBFILE}")
+    FileUtils.rm_f(rsync_stat_file(JOBJARPATH))
+    FileUtils.rm_rf(BUILDDIR)
+    exit(1)
+  end
+
+  FileUtils.rm_f(JOBJARPATH)
+  system("jar cf #{JOBJARPATH} -C #{BUILDDIR} .")
+  FileUtils.rm_rf(BUILDDIR)
+end
+
+#Remote hadoop command for the source-file case: job jar is shipped via -libjars.
+def hadoop_command
+  "HADOOP_CLASSPATH=/usr/share/java/hadoop-lzo-0.4.14.jar:#{JARBASE}:job-jars/#{JOBJAR} " +
+    "hadoop jar #{JARBASE} -libjars job-jars/#{JOBJAR} -Dmapred.reduce.tasks=#{REDUCERS} #{JOB} --hdfs " +
+    ARGV.join(" ")
+end
+
+#Remote hadoop command when JOB is a class already inside the assembly jar.
+def jar_mode_command
+  "hadoop jar #{JARBASE} -Dmapred.reduce.tasks=#{REDUCERS} #{JOB} --hdfs " + ARGV.join(" ")
+end
+
+#Always sync the remote JARFILE
+rsync(JARPATH, JARBASE) if !is_local?
+if is_file?
+  build_job_jar if needs_rebuild?
+
+  if !is_local?
+    #Make sure the job-jars/ directory exists before rsyncing to it
+    system("ssh #{HOST} '[ ! -d job-jars/ ] && mkdir job-jars/'")
+    #rsync only acts if the file is out of date
+    rsync(JOBJARPATH, "job-jars/" + JOBJAR)
+  end
+end
+
+#Build the shell command for the chosen mode (the rsync threads run concurrently).
+SHELL_COMMAND = case MODE
+  when "--hdfs"
+    if is_file?
+      "ssh -C #{HOST} #{hadoop_command}"
+    else
+      "ssh -C #{HOST} #{jar_mode_command}"
+    end
+  when "--print"
+    if is_file?
+      "echo #{hadoop_command}"
+    else
+      "echo #{jar_mode_command}"
+    end
+  when "--local"
+    if is_file?
+      "java -Xmx#{LOCALMEM} -cp #{JARPATH}:#{JOBJARPATH} com.twitter.scalding.Tool #{JOB} --local " + ARGV.join(" ")
+    else
+      "java -Xmx#{LOCALMEM} -cp #{JARPATH} com.twitter.scalding.Tool #{JOB} --local " + ARGV.join(" ")
+    end
+  else
+    raise "Unrecognized mode: " + MODE
+  end
+
+#Now block on all the threads:
+if THREADS.size > 0
+  puts "Waiting for background threads..."
+  THREADS.each { |rsyncT| rsyncT.join }
+end
+#If there are no errors:
+if FAILURES.size == 0
+  system(SHELL_COMMAND)
+else
+  FAILURES.each { |msg| $stderr.puts msg }
+  exit(1)
+end
56 scripts/scalding_gen.rb
@@ -0,0 +1,56 @@
+#!/usr/bin/env ruby
+
+#Code generator: writes GeneratedConversions.scala to stdout, emitting a
+#TupleConverter and a TupleSetter instance for Tuple1 through Tuple22.
+$indent = " "
+
+#Emit an implicit def producing a TupleConverter that unpacks a cascading
+#Tuple into a scala TupleN via one TupleGetter per field.
+def make_tuple_conv(cnt)
+  type_names = ('A'..'Z').to_a[0...cnt]
+  indices = (0...cnt).to_a
+  comma_tn = type_names.join(",")
+  getters = type_names.map { |n|
+    #" g#{n} : TupleGetter[#{n}]"
+    " g#{n} : TupleGetter[#{n}]"
+  }.join(",\n#{$indent}")
+  typed_args = type_names.zip(indices).map { |n,ni|
+    "g#{n}.get(tup, #{ni})"
+  }.join(",\n#{$indent} ")
+  %Q|\n#{$indent}implicit def tuple#{cnt}Converter[#{comma_tn}](implicit
+#{$indent}#{getters}) = new TupleConverter[Tuple#{cnt}[#{comma_tn}]]{
+#{$indent} def apply(tup : Tuple) = {
+#{$indent} Tuple#{cnt}(#{typed_args})
+#{$indent} }
+#{$indent} def arity = #{cnt}
+#{$indent}}
+|
+end
+
+#Emit an implicit object producing a TupleSetter that packs a scala TupleN
+#into a cascading Tuple by adding each field in order.
+def make_setter(cnt)
+  underscores = (["_"]*cnt).join(",")
+  head = %Q|\n#{$indent}implicit object Tup#{cnt}Setter extends TupleSetter[Tuple#{cnt}[#{underscores}]] {
+#{$indent} override def apply(arg : Tuple#{cnt}[#{underscores}]) = {
+#{$indent} val tup = new Tuple
+#{$indent} |
+  middle = (1..cnt).map {|c| "tup.add(arg._#{c})" }.join("\n#{$indent} ")
+  tail = %Q|
+#{$indent} tup
+#{$indent} }
+
+#{$indent} override def arity = #{cnt}
+#{$indent}}|
+  head + middle + tail
+end
+
+puts "// following were autogenerated by #{__FILE__} at #{Time.now} do not edit"
+puts %q|package com.twitter.scalding
+import cascading.tuple.Tuple
+import cascading.tuple.TupleEntry
+
+trait GeneratedConversions extends LowPriorityConversions {
+|
+
+#Generate instances for every scala tuple arity (1..22)
+(1..22).each { |c|
+  puts make_tuple_conv(c)
+  puts make_setter(c)
+}
+
+puts "}"
+puts "// end of autogenerated"
99 src/main/scala/com/twitter/scalding/Args.scala
@@ -0,0 +1,99 @@
+/*
+Copyright 2012 Twitter, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+package com.twitter.scalding
+
+/**
+* The args class does a simple command line parsing. The rules are:
+* keys start with one or more "-". Each key has zero or more values
+* following.
+*/
+object Args {
+  /**
+  * Split on whitespace and then parse.
+  */
+  def apply(argString : String) : Args = Args(argString.split("\\s+"))
+  /**
+  * parses keys as starting with a dash. All following non-dashed args are a list of values.
+  * If the list starts with non-dashed args, these are associated with the
+  * empty string: ""
+  **/
+  def apply(args : Iterable[String]) : Args = {
+    //NOTE(review): startingDashes is never used below; candidate for removal
+    def startingDashes(word : String) = word.takeWhile { _ == '-' }.length
+    new Args(
+      //Fold into a list of (arg -> List[values])
+      args.foldLeft(List("" -> List[String]())){(acc, arg) =>
+        val noDashes = arg.dropWhile{ _ == '-'}
+        //a value (no leading dash) is prepended to the current key's list;
+        //a dashed arg starts a fresh (key -> values) pair at the head
+        if(arg == noDashes)
+          (acc.head._1 -> (arg :: acc.head._2)) :: acc.tail
+        else
+          (noDashes -> List()) :: acc
+      }. //Now reverse the values to keep the same order
+      map {case (key, value) => key -> value.reverse}.toMap
+    )
+  }
+}
+
+@serializable
+class Args(m : Map[String,List[String]]) {
+
+  //Replace or add a given key+args pair:
+  def +(keyvals : (String,Iterable[String])) = {
+    new Args(m + (keyvals._1 -> keyvals._2.toList))
+  }
+
+  /**
+  * Does this Args contain a given key?
+  */
+  def boolean(key : String) = m.contains(key)
+
+  /**
+  * Get the list of values associated with a given key.
+  * if the key is absent, return the empty list. NOTE: empty
+  * does not mean the key is absent, it could be a key without
+  * a value. Use boolean() to check existence.
+  */
+  def list(key : String) = m.get(key).getOrElse(List())
+
+  /**
+  * This is a synonym for required
+  */
+  def apply(key : String) = required(key)
+
+  /**
+  * Equivalent to .optional(key).getOrElse(default)
+  */
+  def getOrElse(key : String, default : String) = optional(key).getOrElse(default)
+
+  /**
+  * return exactly one value for a given key.
+  * If there is more than one value, you get an exception
+  */
+  //NOTE(review): error here is Predef.error, which throws a RuntimeException
+  def required(key : String) = list(key) match {
+    case List() => error("Please provide a value for --" + key)
+    case List(a) => a
+    case _ => error("Please only provide a single value for --" + key)
+  }
+
+  /**
+  * If there is zero or one element, return it as an Option.
+  * If there is a list of more than one item, you get an error
+  */
+  def optional(key : String) : Option[String] = list(key) match {
+    case List() => None
+    case List(a) => Some(a)
+    case _ => error("Please provide at most one value for --" + key)
+  }
+}
348 src/main/scala/com/twitter/scalding/DateRange.scala
@@ -0,0 +1,348 @@
+/*
+Copyright 2012 Twitter, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+package com.twitter.scalding
+
+import scala.annotation.tailrec
+import scala.util.matching.Regex
+
+import java.text.SimpleDateFormat
+import java.util.Calendar
+import java.util.Date
+import java.util.TimeZone
+import java.util.NoSuchElementException
+
+import org.apache.commons.lang.time.DateUtils
+
/**
* Holds some conversion functions for dealing with strings as RichDate objects
*/
@serializable
object DateOps {
  //Frequently-used time zones.
  val PACIFIC = TimeZone.getTimeZone("America/Los_Angeles")
  val UTC = TimeZone.getTimeZone("UTC")

  //Accepted date format patterns, from coarsest to finest grain.
  val DATE_WITH_DASH = "yyyy-MM-dd"
  val DATEHOUR_WITH_DASH = "yyyy-MM-dd HH"
  val DATETIME_WITH_DASH = "yyyy-MM-dd HH:mm"
  val DATETIME_HMS_WITH_DASH = "yyyy-MM-dd HH:mm:ss"
  val DATETIME_HMSM_WITH_DASH = "yyyy-MM-dd HH:mm:ss.SSS"

  private val DATE_RE = """\d{4}-\d{2}-\d{2}"""
  //Date/time separator: an optional literal "T" or a run of whitespace.
  private val SEP_RE = """(T?|\s*)"""
  //Pairs each format with a regex that recognizes strings in it. Order
  //matters: getFormat returns the FIRST pattern whose regex matches.
  private val DATE_FORMAT_VALIDATORS = List(DATE_WITH_DASH -> new Regex("""^\s*""" + DATE_RE + """\s*$"""),
                                          DATEHOUR_WITH_DASH -> new Regex("""^\s*""" + DATE_RE +
                                                                          SEP_RE + """\d\d\s*$"""),
                                          DATETIME_WITH_DASH -> new Regex("""^\s*""" + DATE_RE +
                                                                          SEP_RE + """\d\d:\d\d\s*$"""),
                                          DATETIME_HMS_WITH_DASH -> new Regex("""^\s*""" + DATE_RE +
                                                                          SEP_RE + """\d\d:\d\d:\d\d\s*$"""),
                                          DATETIME_HMSM_WITH_DASH -> new Regex("""^\s*""" + DATE_RE +
                                                                          SEP_RE + """\d\d:\d\d:\d\d\.\d{1,3}\s*$"""))
  /**
  * Return the guessed format for this datestring.
  * NOTE: throws NoSuchElementException (via .get on find) when no validator
  * matches; stringToRichDate below catches exactly that to report bad input.
  */
  def getFormat(s : String) = DATE_FORMAT_VALIDATORS.find{_._2.findFirstIn(s).isDefined}.get._1

  /**
  * Parse the string with one of the value DATE_FORMAT_VALIDATORS in the order listed above.
  * We allow either date, date with time in minutes, date with time down to seconds.
  * The separator between date and time can be a space or "T".
  * Throws IllegalArgumentException when the string matches no known format.
  */
  implicit def stringToRichDate(str : String)(implicit tz : TimeZone) = {
    try {
      //We allow T to separate dates and times, just remove it and then validate:
      val newStr = str.replace("T"," ")
      val fmtStr = getFormat(newStr)
      val cal = Calendar.getInstance(tz)
      //A fresh SimpleDateFormat per call: SimpleDateFormat is not thread-safe.
      val formatter = new SimpleDateFormat(fmtStr)
      formatter.setCalendar(cal)
      new RichDate(formatter.parse(newStr))
    } catch {
      case e: NoSuchElementException => throw new IllegalArgumentException("Could not convert string: '" + str + "' into a date.")
    }
  }
  //Cheap wrappers between epoch millis / Date / Calendar and RichDate.
  implicit def longToRichDate(ts : Long) = new RichDate(new Date(ts))
  implicit def dateToRichDate(d : Date) = new RichDate(d)
  implicit def richDateToDate(rd : RichDate) = rd.value
  implicit def richDateToCalendar(rd : RichDate)(implicit tz : TimeZone) = {
    val cal = Calendar.getInstance(tz)
    cal.setTime(rd.value)
    cal
  }
  implicit def calendarToRichDate(cal : Calendar) = RichDate(cal.getTime())
}
+
/**
* Represents millisecond based duration (non-calendar based): seconds, minutes, hours
*/
abstract class Duration {
  //Length of this duration in milliseconds: the common currency for arithmetic.
  def toMillisecs : Long
  //Fractional seconds (Double division, so nothing is truncated).
  def toSeconds = toMillisecs / 1000.0
  //All arithmetic collapses to Millisecs, so the original unit is not kept.
  def +(that : Duration) = Millisecs(toMillisecs + that.toMillisecs)
  def -(that : Duration) = Millisecs(toMillisecs - that.toMillisecs)
  //Scaling goes through Double and truncates back to whole milliseconds.
  def *(that : Double) = Millisecs((toMillisecs * that).toLong)
  def /(that : Double) = Millisecs((toMillisecs / that).toLong)
}
//Concrete fixed-length units, each defined only by its millisecond conversion.
case class Millisecs(value : Long) extends Duration {
  def toMillisecs = value
}
case class Seconds(value : Long) extends Duration {
  def toMillisecs = value * 1000L
}
case class Minutes(value : Long) extends Duration {
  def toMillisecs = value * 60 * 1000L
}
case class Hours(value : Long) extends Duration {
  def toMillisecs = value * 60 * 60 * 1000L
}
+
/**
* this is only relative to a calendar
* (a "day" may be 23 or 25 wall-clock hours across a DST change, so these
* cannot be converted to milliseconds without a TimeZone; see RichDate.+)
*/
abstract class CalendarDuration {
  //Length of this duration in whole calendar days.
  def toDays : Int
  //Arithmetic collapses to Days, losing the original unit.
  def +(that : CalendarDuration) = Days(toDays + that.toDays)
  def -(that : CalendarDuration) = Days(toDays - that.toDays)
}

case class Days(value : Int) extends CalendarDuration {
  def toDays = value
}
case class Weeks(value : Int) extends CalendarDuration {
  def toDays = value * 7
}
+
/**
* RichDate adds some nice convenience functions to the Java date/calendar classes
* We commonly do Date/Time work in analysis jobs, so having these operations convenient
* is very helpful.
*/
object RichDate {
  //Parse a date string (formats accepted by DateOps) in the given time zone.
  def apply(s : String)(implicit tz : TimeZone) = {
    DateOps.stringToRichDate(s)(tz)
  }
  //Wrap an epoch-millisecond timestamp.
  def apply(l : Long) = {
    DateOps.longToRichDate(l)
  }
  /**
  * Latest millisecond covered by the string at its own precision, e.g.
  * "2011-01-02" covers through 2011-01-02 23:59:59.999 in the given zone.
  * NOTE(review): the millisecond-precision case adds Millisecs(2) - 1 = +1ms,
  * i.e. one millisecond AFTER the parsed instant — confirm this asymmetry
  * with the other cases is intended.
  */
  def upperBound(s : String)(implicit tz : TimeZone) = {
    val end = apply(s)(tz)
    (DateOps.getFormat(s) match {
      case DateOps.DATE_WITH_DASH => end + Days(1)
      case DateOps.DATEHOUR_WITH_DASH => end + Hours(1)
      case DateOps.DATETIME_WITH_DASH => end + Minutes(1)
      case DateOps.DATETIME_HMS_WITH_DASH => end + Seconds(1)
      case DateOps.DATETIME_HMSM_WITH_DASH => end + Millisecs(2)
    }) - Millisecs(1)
  }
}
+
/**
* Wraps java.util.Date with arithmetic, comparison and calendar-truncation
* helpers. Millisecond-based math uses Duration; day/week math uses
* CalendarDuration and takes an implicit TimeZone so DST is respected.
*/
case class RichDate(val value : Date) extends Ordered[RichDate] {
  /** Shift forward by a fixed millisecond-based duration. */
  def +(interval : Duration) = new RichDate(new Date(value.getTime + interval.toMillisecs))
  /** Shift backward by a fixed millisecond-based duration. */
  def -(interval : Duration) = new RichDate(new Date(value.getTime - interval.toMillisecs))

  /** Shift forward by whole calendar days in the given zone (DST-aware). */
  def +(interval : CalendarDuration)(implicit tz : TimeZone) = {
    val cal = toCalendar(tz)
    cal.setLenient(true)
    cal.add(Calendar.DAY_OF_YEAR, interval.toDays)
    new RichDate(cal.getTime)
  }
  /** Shift backward by whole calendar days in the given zone (DST-aware). */
  def -(interval : CalendarDuration)(implicit tz : TimeZone) = {
    val cal = toCalendar(tz)
    cal.setLenient(true)
    cal.add(Calendar.DAY_OF_YEAR, -(interval.toDays))
    new RichDate(cal.getTime)
  }

  //Inverse of the above, d2 + (d1 - d2) == d1
  /**
  * Difference of two instants, reported in the coarsest unit
  * (hours/minutes/seconds/millisecs) that divides it exactly.
  */
  def -(that : RichDate) : Duration = {
    val diff = value.getTime - that.value.getTime
    val units = List(Hours,Minutes,Seconds,Millisecs)
    //We can't fail the last one, x % 1 == 0
    val d_unit = units.find { u : Function1[Long,Duration] =>
      (diff % u(1).toMillisecs) == 0
    }.head
    d_unit( diff / d_unit(1).toMillisecs )
  }
  /** Chronological ordering on the underlying timestamp. */
  override def compare(that : RichDate) : Int = {
    if (value.before(that.value)) {
      -1
    }
    else if (value.after(that.value)) {
      1
    } else {
      0
    }
  }

  //Truncate to the earliest millisecond of the given calendar field.
  //NOTE: DateUtils.truncate returns a NEW Calendar; it does not mutate cal.
  private def earliestIn(calField : Int, tz : TimeZone) : RichDate = {
    val cal = toCalendar(tz)
    new RichDate(DateUtils.truncate(cal, calField).getTime)
  }
  /**
  * Truncate to the earliest millisecond in the same hour as this time, in the given TZ.
  */
  def earliestInHour(implicit tz : TimeZone) = earliestIn(Calendar.HOUR, tz)
  /**
  * Truncate to the earliest millisecond in the same day as this time, in the given TZ.
  */
  def earliestInDay(implicit tz : TimeZone) = earliestIn(Calendar.DAY_OF_MONTH, tz)
  /**
  * Truncate to the earliest millisecond in the most recent Monday as this time, in the given TZ.
  */
  def earliestInWeek(implicit tz : TimeZone) = {
    //Walk backwards one day at a time until we land on a Monday.
    @tailrec def recentMonday(cal : Calendar) : Calendar = {
      cal.get(Calendar.DAY_OF_WEEK) match {
        case Calendar.MONDAY => cal
        case _ => {
          //The sorrows of the highly mutable Java standard library
          val newc = cal.clone().asInstanceOf[Calendar];
          //Make it clear we want to interpret a previous day at the beginning
          //of the year/week as the previous week
          newc.setLenient(true)
          newc.add(Calendar.DAY_OF_MONTH, -1)
          recentMonday(newc)
        }
      }
    }
    val mon = recentMonday(toCalendar(tz))
    //Set it to the earliest point in the day.
    //BUGFIX: DateUtils.truncate does NOT mutate its argument; it returns a
    //truncated copy. The original discarded that copy, so the result kept the
    //input's time-of-day. Use the returned calendar instead.
    val earliestMon = DateUtils.truncate(mon, Calendar.DATE)
    new RichDate(earliestMon.getTime)
  }

  //True of the other is a RichDate with equal value, or a Date equal to value
  override def equals(that : Any) = {
    //Due to type erasure (scala 2.9 complains), we need to use a manifest:
    def opInst[T : Manifest](v : Any) = {
      val klass = manifest[T].erasure
      if(null != v && klass.isInstance(v)) Some(v.asInstanceOf[T]) else None
    }
    opInst[RichDate](that)
      .map( _.value)
      .orElse(opInst[Date](that))
      .map( _.equals(value) )
      .getOrElse(false)
  }
  //Delegate to Date so equal values (RichDate or Date) hash alike.
  override def hashCode = { value.hashCode }

  //milliseconds since the epoch
  def timestamp : Long = value.getTime

  /** This instant as a mutable Calendar in the given zone. */
  def toCalendar(implicit tz: TimeZone) = {
    val cal = Calendar.getInstance(tz)
    cal.setTime(value)
    cal
  }
  override def toString = {
    value.toString
  }

  /** Format this instant with a SimpleDateFormat pattern in the given zone. */
  def toString(fmt : String)(implicit tz : TimeZone) : String = {
    val cal = toCalendar(tz)
    val sdfmt = new SimpleDateFormat(fmt)
    sdfmt.setCalendar(cal)
    sdfmt.format(cal.getTime)
  }
}
+
/**
* represents a closed interval of time.
*/
case class DateRange(val start : RichDate, val end : RichDate) {
  import DateOps._
  /**
  * shift this by the given unit
  */
  def +(timespan : Duration) = DateRange(start + timespan, end + timespan)
  def -(timespan : Duration) = DateRange(start - timespan, end - timespan)

  /** True when this whole range ends before d. */
  def isBefore(d : RichDate) = end < d
  /** True when this whole range begins after d. */
  def isAfter(d : RichDate) = d < start
  /**
  * make the range wider by delta on each side. Good to catch events which
  * might spill over.
  */
  def embiggen(delta : Duration) = DateRange(start - delta, end + delta)
  /**
  * Extend the length by moving the end. We can keep the party going, but we
  * can't start it earlier.
  */
  def extend(delta : Duration) = DateRange(start, end + delta)

  /** True when point lies inside this closed interval. */
  def contains(point : RichDate) = (start <= point) && (point <= end)
  /**
  * Is the given Date range a (non-strict) subset of the given range
  */
  def contains(dr : DateRange) = start <= dr.start && dr.end <= end

  /**
  * Shared tail-recursive worker for both each() overloads: repeatedly split
  * this range at the boundary produced by nextStart, accumulating the pieces
  * on a stack (so they come out backwards) and reversing at the end.
  * Extracted because the two overloads previously duplicated this loop.
  */
  private def eachBy(nextStart : DateRange => RichDate) : List[DateRange] = {
    @tailrec def eachRec(acc : List[DateRange], nextDr : DateRange) : List[DateRange] = {
      val next_start = nextStart(nextDr)
      //the smallest grain of time we count is 1 millisecond
      val this_end = next_start - Millisecs(1)
      if( nextDr.end <= this_end ) {
        //This is the last block, output and end:
        nextDr :: acc
      }
      else {
        //Put this piece on the stack, then continue with the remainder:
        val today = DateRange(nextDr.start, this_end)
        eachRec(today :: acc, DateRange(next_start, nextDr.end))
      }
    }
    //have to reverse because eachRec produces backwards
    eachRec(Nil, this).reverse
  }

  /**
  * produce a contiguous non-overlapping set of DateRanges
  * whose union is equivalent to this.
  * For an Hours span, break points are aligned to the start of the hour in
  * the given time zone; otherwise breaks fall at start + k * span.
  * (Day/week alignment is handled by the CalendarDuration overload below.)
  */
  def each(span : Duration)(implicit tz: TimeZone) : Iterable[DateRange] =
    eachBy { dr =>
      span match {
        case Hours(_) => dr.start.earliestInHour(tz) + span
        case _ => dr.start + span
      }
    }

  /**
  * produce a contiguous non-overlapping set of DateRanges
  * whose union is equivalent to this.
  * Week spans align to the most recent Monday, day spans to the start of the
  * day, both in the given time zone.
  */
  def each(span : CalendarDuration)(implicit tz: TimeZone) : Iterable[DateRange] =
    eachBy { dr =>
      span match {
        case Weeks(_) => dr.start.earliestInWeek(tz) + span
        case Days(_) => dr.start.earliestInDay(tz) + span
      }
    }
}
128 src/main/scala/com/twitter/scalding/FieldConversions.scala
@@ -0,0 +1,128 @@
+/*
+Copyright 2012 Twitter, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+package com.twitter.scalding
+
+import cascading.tuple.Fields
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.WrappedArray
+
trait LowPriorityFieldConversions {

  /**
  * Lift a single field descriptor (Symbol, String or java.lang.Integer) to
  * the Comparable form the cascading Fields constructor accepts; anything
  * else is rejected with an Exception.
  */
  protected def anyToFieldArg(f : Any) : Comparable[_] = f match {
    case sym : Symbol => sym.name
    case str : String => str
    case idx : java.lang.Integer => idx
    case other => throw new Exception("Could not convert: " + other.toString + " to Fields argument")
  }

  /**
  * Handles treating any TupleN as a Fields object.
  * This is low priority because List is also a Product, but this method
  * will not work for List (because List is Product2(head, tail) and so
  * productIterator won't work as expected.
  * Lists are handled by an implicit in FieldConversions, which have
  * higher priority.
  */
  implicit def productToFields( f : Product ) = {
    val fieldArgs = f.productIterator.map { anyToFieldArg }.toSeq
    new Fields(fieldArgs :_* )
  }
}
+
//Implicit Fields conversions. Because this trait extends (is a subtype of)
//LowPriorityFieldConversions, the conversions declared here win implicit
//resolution over productToFields there, which is how List avoids being
//treated as a Product.
trait FieldConversions extends LowPriorityFieldConversions {
  //View a Fields as the set of its selectors (names and/or positions).
  def asSet(f : Fields) : Set[Any] = f.iterator.toSet

  //True when any selector in f is positional (a java.lang.Integer index).
  def hasInts(f : Fields) = {
    f.iterator.find { _.isInstanceOf[java.lang.Integer] }.isDefined
  }

  /**
  * Rather than give the full power of cascading's selectors, we have
  * a simpler set of rules encoded below:
  * 1) if the input is non-definite (ALL, GROUP, ARGS, etc...) ALL is the output.
  *      Perhaps only fromFields=ALL will make sense
  * 2) If one of from or to is a strict super set of the other, SWAP is used.
  * 3) If they are equal, REPLACE is used.
  * 4) Otherwise, ALL is used.
  */
  def defaultMode(fromFields : Fields, toFields : Fields) : Fields = {
    if( fromFields.size == 0 ) {
      //This is all the UNKNOWN, ALL, etc...
      Fields.ALL
    }
    else {
      val fromSet = asSet(fromFields)
      val toSet = asSet(toFields)
      (fromSet.subsetOf(toSet), toSet.subsetOf(fromSet)) match {
        case (true, true) => Fields.REPLACE //equal
        case (true, false) => Fields.SWAP //output super set, replaces input
        case (false, true) => Fields.SWAP //throw away some input
        /*
         * the next case is that they are disjoint or have some nontrivial intersection
         * if disjoint, everything is fine.
         * if they intersect, it is ill-defined and cascading is going to throw an error BEFORE
         * starting the flow.
         */
        case (false, false) => Fields.ALL
      }
    }
  }

  //Single entry fields:
  implicit def unitToFields(u : Unit) = Fields.NONE
  implicit def intToFields(x : Int) = new Fields(new java.lang.Integer(x))
  implicit def integerToFields(x : java.lang.Integer) = new Fields(x)
  implicit def stringToFields(x : String) = new Fields(x)
  /**
  * '* means Fields.ALL, otherwise we take the .name
  */
  implicit def symbolToFields(x : Symbol) = {
    if(x == '*) {
      Fields.ALL
    }
    else {
      new Fields(x.name)
    }
  }

  /**
  * Multi-entry fields. This are higher priority than Product conversions so
  * that List will not conflict with Product.
  */
  implicit def fields[T <: TraversableOnce[Symbol]](f : T) = new Fields(f.toSeq.map(_.name) : _*)
  implicit def strFields[T <: TraversableOnce[String]](f : T) = new Fields(f.toSeq : _*)
  implicit def intFields[T <: TraversableOnce[Int]](f : T) = {
    new Fields(f.toSeq.map { new java.lang.Integer(_) } : _*)
  }
  /**
  * Useful to convert f : Any* to Fields.  This handles mixed cases ("hey", 'you).
  * Not sure we should be this flexible, but given that Cascading will throw an
  * exception before scheduling the job, I guess this is okay.
  */
  implicit def parseAnySeqToFields[T <: TraversableOnce[Any]](anyf : T) = {
    new Fields(anyf.toSeq.map { anyToFieldArg } : _* )
  }

  //Handle a pair generally: each element converts via its own evidence,
  //so ('a, "b") or (1, 'c) both yield a (Fields, Fields).
  implicit def tuple2ToFieldsPair[T,U]( pair : (T,U) )
    (implicit tf : T => Fields, uf : U => Fields) : (Fields,Fields) = {
    val f1 = tf(pair._1)
    val f2 = uf(pair._2)
    (f1, f2)
  }
}
1,112 src/main/scala/com/twitter/scalding/GeneratedConversions.scala
@@ -0,0 +1,1112 @@
+/*
+Copyright 2012 Twitter, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// following were autogenerated by ./scalding_gen.rb at Thu Nov 17 15:32:06 -0800 2011 do not edit
+package com.twitter.scalding
+import cascading.tuple.Tuple
+import cascading.tuple.TupleEntry
+
+trait GeneratedConversions extends LowPriorityConversions {
+
  //Builds a TupleConverter that reads position 0 of a cascading Tuple into a
  //scala Tuple1, using the implicit TupleGetter for the element type.
  //(Autogenerated by scalding_gen.rb — do not hand-edit; the tupleNConverter
  //definitions below follow this exact pattern up to arity.)
  implicit def tuple1Converter[A](implicit
    gA : TupleGetter[A]) = new TupleConverter[Tuple1[A]]{
    def apply(tup : Tuple) = {
      Tuple1(gA.get(tup, 0))
    }
    def arity = 1
  }
+
  //Packs a scala Tuple1 into a fresh cascading Tuple of arity 1.
  //(Autogenerated by scalding_gen.rb — do not hand-edit; the TupNSetter
  //objects below follow this exact pattern up to arity.)
  implicit object Tup1Setter extends TupleSetter[Tuple1[_]] {
    override def apply(arg : Tuple1[_]) = {
      val tup = new Tuple
      tup.add(arg._1)
      tup
    }

    override def arity = 1
  }
+
+ implicit def tuple2Converter[A,B](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B]) = new TupleConverter[Tuple2[A,B]]{
+ def apply(tup : Tuple) = {
+ Tuple2(gA.get(tup, 0),
+ gB.get(tup, 1))
+ }
+ def arity = 2
+ }
+
+ implicit object Tup2Setter extends TupleSetter[Tuple2[_,_]] {
+ override def apply(arg : Tuple2[_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup
+ }
+
+ override def arity = 2
+ }
+
+ implicit def tuple3Converter[A,B,C](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C]) = new TupleConverter[Tuple3[A,B,C]]{
+ def apply(tup : Tuple) = {
+ Tuple3(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2))
+ }
+ def arity = 3
+ }
+
+ implicit object Tup3Setter extends TupleSetter[Tuple3[_,_,_]] {
+ override def apply(arg : Tuple3[_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup
+ }
+
+ override def arity = 3
+ }
+
+ implicit def tuple4Converter[A,B,C,D](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D]) = new TupleConverter[Tuple4[A,B,C,D]]{
+ def apply(tup : Tuple) = {
+ Tuple4(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3))
+ }
+ def arity = 4
+ }
+
+ implicit object Tup4Setter extends TupleSetter[Tuple4[_,_,_,_]] {
+ override def apply(arg : Tuple4[_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup
+ }
+
+ override def arity = 4
+ }
+
+ implicit def tuple5Converter[A,B,C,D,E](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E]) = new TupleConverter[Tuple5[A,B,C,D,E]]{
+ def apply(tup : Tuple) = {
+ Tuple5(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4))
+ }
+ def arity = 5
+ }
+
+ implicit object Tup5Setter extends TupleSetter[Tuple5[_,_,_,_,_]] {
+ override def apply(arg : Tuple5[_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup
+ }
+
+ override def arity = 5
+ }
+
+ implicit def tuple6Converter[A,B,C,D,E,F](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F]) = new TupleConverter[Tuple6[A,B,C,D,E,F]]{
+ def apply(tup : Tuple) = {
+ Tuple6(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5))
+ }
+ def arity = 6
+ }
+
+ implicit object Tup6Setter extends TupleSetter[Tuple6[_,_,_,_,_,_]] {
+ override def apply(arg : Tuple6[_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup
+ }
+
+ override def arity = 6
+ }
+
+ implicit def tuple7Converter[A,B,C,D,E,F,G](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G]) = new TupleConverter[Tuple7[A,B,C,D,E,F,G]]{
+ def apply(tup : Tuple) = {
+ Tuple7(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6))
+ }
+ def arity = 7
+ }
+
+ implicit object Tup7Setter extends TupleSetter[Tuple7[_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple7[_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup
+ }
+
+ override def arity = 7
+ }
+
+ implicit def tuple8Converter[A,B,C,D,E,F,G,H](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H]) = new TupleConverter[Tuple8[A,B,C,D,E,F,G,H]]{
+ def apply(tup : Tuple) = {
+ Tuple8(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7))
+ }
+ def arity = 8
+ }
+
+ implicit object Tup8Setter extends TupleSetter[Tuple8[_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple8[_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup
+ }
+
+ override def arity = 8
+ }
+
+ implicit def tuple9Converter[A,B,C,D,E,F,G,H,I](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I]) = new TupleConverter[Tuple9[A,B,C,D,E,F,G,H,I]]{
+ def apply(tup : Tuple) = {
+ Tuple9(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8))
+ }
+ def arity = 9
+ }
+
+ implicit object Tup9Setter extends TupleSetter[Tuple9[_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple9[_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup
+ }
+
+ override def arity = 9
+ }
+
+ implicit def tuple10Converter[A,B,C,D,E,F,G,H,I,J](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J]) = new TupleConverter[Tuple10[A,B,C,D,E,F,G,H,I,J]]{
+ def apply(tup : Tuple) = {
+ Tuple10(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9))
+ }
+ def arity = 10
+ }
+
+ implicit object Tup10Setter extends TupleSetter[Tuple10[_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple10[_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup
+ }
+
+ override def arity = 10
+ }
+
+ implicit def tuple11Converter[A,B,C,D,E,F,G,H,I,J,K](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J],
+ gK : TupleGetter[K]) = new TupleConverter[Tuple11[A,B,C,D,E,F,G,H,I,J,K]]{
+ def apply(tup : Tuple) = {
+ Tuple11(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9),
+ gK.get(tup, 10))
+ }
+ def arity = 11
+ }
+
+ implicit object Tup11Setter extends TupleSetter[Tuple11[_,_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple11[_,_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup.add(arg._11)
+ tup
+ }
+
+ override def arity = 11
+ }
+
+ implicit def tuple12Converter[A,B,C,D,E,F,G,H,I,J,K,L](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J],
+ gK : TupleGetter[K],
+ gL : TupleGetter[L]) = new TupleConverter[Tuple12[A,B,C,D,E,F,G,H,I,J,K,L]]{
+ def apply(tup : Tuple) = {
+ Tuple12(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9),
+ gK.get(tup, 10),
+ gL.get(tup, 11))
+ }
+ def arity = 12
+ }
+
+ implicit object Tup12Setter extends TupleSetter[Tuple12[_,_,_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple12[_,_,_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup.add(arg._11)
+ tup.add(arg._12)
+ tup
+ }
+
+ override def arity = 12
+ }
+
+ implicit def tuple13Converter[A,B,C,D,E,F,G,H,I,J,K,L,M](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J],
+ gK : TupleGetter[K],
+ gL : TupleGetter[L],
+ gM : TupleGetter[M]) = new TupleConverter[Tuple13[A,B,C,D,E,F,G,H,I,J,K,L,M]]{
+ def apply(tup : Tuple) = {
+ Tuple13(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9),
+ gK.get(tup, 10),
+ gL.get(tup, 11),
+ gM.get(tup, 12))
+ }
+ def arity = 13
+ }
+
+ implicit object Tup13Setter extends TupleSetter[Tuple13[_,_,_,_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple13[_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup.add(arg._11)
+ tup.add(arg._12)
+ tup.add(arg._13)
+ tup
+ }
+
+ override def arity = 13
+ }
+
+ implicit def tuple14Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J],
+ gK : TupleGetter[K],
+ gL : TupleGetter[L],
+ gM : TupleGetter[M],
+ gN : TupleGetter[N]) = new TupleConverter[Tuple14[A,B,C,D,E,F,G,H,I,J,K,L,M,N]]{
+ def apply(tup : Tuple) = {
+ Tuple14(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9),
+ gK.get(tup, 10),
+ gL.get(tup, 11),
+ gM.get(tup, 12),
+ gN.get(tup, 13))
+ }
+ def arity = 14
+ }
+
+ implicit object Tup14Setter extends TupleSetter[Tuple14[_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple14[_,_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup.add(arg._11)
+ tup.add(arg._12)
+ tup.add(arg._13)
+ tup.add(arg._14)
+ tup
+ }
+
+ override def arity = 14
+ }
+
+ implicit def tuple15Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J],
+ gK : TupleGetter[K],
+ gL : TupleGetter[L],
+ gM : TupleGetter[M],
+ gN : TupleGetter[N],
+ gO : TupleGetter[O]) = new TupleConverter[Tuple15[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O]]{
+ def apply(tup : Tuple) = {
+ Tuple15(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9),
+ gK.get(tup, 10),
+ gL.get(tup, 11),
+ gM.get(tup, 12),
+ gN.get(tup, 13),
+ gO.get(tup, 14))
+ }
+ def arity = 15
+ }
+
+ implicit object Tup15Setter extends TupleSetter[Tuple15[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple15[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup.add(arg._11)
+ tup.add(arg._12)
+ tup.add(arg._13)
+ tup.add(arg._14)
+ tup.add(arg._15)
+ tup
+ }
+
+ override def arity = 15
+ }
+
+ implicit def tuple16Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P](implicit
+ gA : TupleGetter[A],
+ gB : TupleGetter[B],
+ gC : TupleGetter[C],
+ gD : TupleGetter[D],
+ gE : TupleGetter[E],
+ gF : TupleGetter[F],
+ gG : TupleGetter[G],
+ gH : TupleGetter[H],
+ gI : TupleGetter[I],
+ gJ : TupleGetter[J],
+ gK : TupleGetter[K],
+ gL : TupleGetter[L],
+ gM : TupleGetter[M],
+ gN : TupleGetter[N],
+ gO : TupleGetter[O],
+ gP : TupleGetter[P]) = new TupleConverter[Tuple16[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P]]{
+ def apply(tup : Tuple) = {
+ Tuple16(gA.get(tup, 0),
+ gB.get(tup, 1),
+ gC.get(tup, 2),
+ gD.get(tup, 3),
+ gE.get(tup, 4),
+ gF.get(tup, 5),
+ gG.get(tup, 6),
+ gH.get(tup, 7),
+ gI.get(tup, 8),
+ gJ.get(tup, 9),
+ gK.get(tup, 10),
+ gL.get(tup, 11),
+ gM.get(tup, 12),
+ gN.get(tup, 13),
+ gO.get(tup, 14),
+ gP.get(tup, 15))
+ }
+ def arity = 16
+ }
+
+ implicit object Tup16Setter extends TupleSetter[Tuple16[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {
+ override def apply(arg : Tuple16[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
+ val tup = new Tuple
+ tup.add(arg._1)
+ tup.add(arg._2)
+ tup.add(arg._3)
+ tup.add(arg._4)
+ tup.add(arg._5)
+ tup.add(arg._6)
+ tup.add(arg._7)
+ tup.add(arg._8)
+ tup.add(arg._9)
+ tup.add(arg._10)
+ tup.add(arg._11)
+ tup.add(arg._12)
+ tup.add(arg._13)
+ tup.add(arg._14)
+ tup.add(arg._15)
+ tup.add(arg._16)
+ tup
+ }
+
+ override def arity = 16
+ }
+
/**
 * Implicitly builds a TupleConverter that reads positions 0..16 of a
 * cascading Tuple, decoding each entry with its positional TupleGetter,
 * and returns the result as a scala Tuple17.
 */
implicit def tuple17Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q](implicit
  gA : TupleGetter[A], gB : TupleGetter[B], gC : TupleGetter[C], gD : TupleGetter[D],
  gE : TupleGetter[E], gF : TupleGetter[F], gG : TupleGetter[G], gH : TupleGetter[H],
  gI : TupleGetter[I], gJ : TupleGetter[J], gK : TupleGetter[K], gL : TupleGetter[L],
  gM : TupleGetter[M], gN : TupleGetter[N], gO : TupleGetter[O], gP : TupleGetter[P],
  gQ : TupleGetter[Q]) =
  new TupleConverter[Tuple17[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q]] {
    override def apply(cTup : Tuple) =
      Tuple17(gA.get(cTup, 0), gB.get(cTup, 1), gC.get(cTup, 2), gD.get(cTup, 3),
        gE.get(cTup, 4), gF.get(cTup, 5), gG.get(cTup, 6), gH.get(cTup, 7),
        gI.get(cTup, 8), gJ.get(cTup, 9), gK.get(cTup, 10), gL.get(cTup, 11),
        gM.get(cTup, 12), gN.get(cTup, 13), gO.get(cTup, 14), gP.get(cTup, 15),
        gQ.get(cTup, 16))
    // Number of cascading tuple entries this converter consumes.
    override def arity = 17
  }
+
/**
 * Implicit TupleSetter that packs a scala Tuple17 into a cascading Tuple,
 * appending the elements in positional order (_1 first, _17 last).
 */
implicit object Tup17Setter extends TupleSetter[Tuple17[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {
  override def apply(arg : Tuple17[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
    val packed = new Tuple
    // productIterator yields _1 through _17 in declaration order.
    arg.productIterator.foreach { elem => packed.add(elem) }
    packed
  }

  // A Tuple17 always contributes exactly 17 tuple entries.
  override def arity = 17
}
+
/**
 * Implicitly builds a TupleConverter that reads positions 0..17 of a
 * cascading Tuple, decoding each entry with its positional TupleGetter,
 * and returns the result as a scala Tuple18.
 */
implicit def tuple18Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R](implicit
  gA : TupleGetter[A], gB : TupleGetter[B], gC : TupleGetter[C], gD : TupleGetter[D],
  gE : TupleGetter[E], gF : TupleGetter[F], gG : TupleGetter[G], gH : TupleGetter[H],
  gI : TupleGetter[I], gJ : TupleGetter[J], gK : TupleGetter[K], gL : TupleGetter[L],
  gM : TupleGetter[M], gN : TupleGetter[N], gO : TupleGetter[O], gP : TupleGetter[P],
  gQ : TupleGetter[Q], gR : TupleGetter[R]) =
  new TupleConverter[Tuple18[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R]] {
    override def apply(cTup : Tuple) =
      Tuple18(gA.get(cTup, 0), gB.get(cTup, 1), gC.get(cTup, 2), gD.get(cTup, 3),
        gE.get(cTup, 4), gF.get(cTup, 5), gG.get(cTup, 6), gH.get(cTup, 7),
        gI.get(cTup, 8), gJ.get(cTup, 9), gK.get(cTup, 10), gL.get(cTup, 11),
        gM.get(cTup, 12), gN.get(cTup, 13), gO.get(cTup, 14), gP.get(cTup, 15),
        gQ.get(cTup, 16), gR.get(cTup, 17))
    // Number of cascading tuple entries this converter consumes.
    override def arity = 18
  }
+
/**
 * Implicit TupleSetter that packs a scala Tuple18 into a cascading Tuple,
 * appending the elements in positional order (_1 first, _18 last).
 */
implicit object Tup18Setter extends TupleSetter[Tuple18[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {
  override def apply(arg : Tuple18[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
    val packed = new Tuple
    // productIterator yields _1 through _18 in declaration order.
    arg.productIterator.foreach { elem => packed.add(elem) }
    packed
  }

  // A Tuple18 always contributes exactly 18 tuple entries.
  override def arity = 18
}
+
/**
 * Implicitly builds a TupleConverter that reads positions 0..18 of a
 * cascading Tuple, decoding each entry with its positional TupleGetter,
 * and returns the result as a scala Tuple19.
 */
implicit def tuple19Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S](implicit
  gA : TupleGetter[A], gB : TupleGetter[B], gC : TupleGetter[C], gD : TupleGetter[D],
  gE : TupleGetter[E], gF : TupleGetter[F], gG : TupleGetter[G], gH : TupleGetter[H],
  gI : TupleGetter[I], gJ : TupleGetter[J], gK : TupleGetter[K], gL : TupleGetter[L],
  gM : TupleGetter[M], gN : TupleGetter[N], gO : TupleGetter[O], gP : TupleGetter[P],
  gQ : TupleGetter[Q], gR : TupleGetter[R], gS : TupleGetter[S]) =
  new TupleConverter[Tuple19[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S]] {
    override def apply(cTup : Tuple) =
      Tuple19(gA.get(cTup, 0), gB.get(cTup, 1), gC.get(cTup, 2), gD.get(cTup, 3),
        gE.get(cTup, 4), gF.get(cTup, 5), gG.get(cTup, 6), gH.get(cTup, 7),
        gI.get(cTup, 8), gJ.get(cTup, 9), gK.get(cTup, 10), gL.get(cTup, 11),
        gM.get(cTup, 12), gN.get(cTup, 13), gO.get(cTup, 14), gP.get(cTup, 15),
        gQ.get(cTup, 16), gR.get(cTup, 17), gS.get(cTup, 18))
    // Number of cascading tuple entries this converter consumes.
    override def arity = 19
  }
+
/**
 * Implicit TupleSetter that packs a scala Tuple19 into a cascading Tuple,
 * appending the elements in positional order (_1 first, _19 last).
 */
implicit object Tup19Setter extends TupleSetter[Tuple19[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {
  override def apply(arg : Tuple19[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]) = {
    val packed = new Tuple
    // productIterator yields _1 through _19 in declaration order.
    arg.productIterator.foreach { elem => packed.add(elem) }
    packed
  }

  // A Tuple19 always contributes exactly 19 tuple entries.
  override def arity = 19
}
+
/**
 * Implicitly builds a TupleConverter that reads positions 0..19 of a
 * cascading Tuple, decoding each entry with its positional TupleGetter,
 * and returns the result as a scala Tuple20.
 */
implicit def tuple20Converter[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T](implicit
  gA : TupleGetter[A], gB : TupleGetter[B], gC : TupleGetter[C], gD : TupleGetter[D],
  gE : TupleGetter[E], gF : TupleGetter[F], gG : TupleGetter[G], gH : TupleGetter[H],
  gI : TupleGetter[I], gJ : TupleGetter[J], gK : TupleGetter[K], gL : TupleGetter[L],
  gM : TupleGetter[M], gN : TupleGetter[N], gO : TupleGetter[O], gP : TupleGetter[P],
  gQ : TupleGetter[Q], gR : TupleGetter[R], gS : TupleGetter[S], gT : TupleGetter[T]) =
  new TupleConverter[Tuple20[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T]] {
    override def apply(cTup : Tuple) =
      Tuple20(gA.get(cTup, 0), gB.get(cTup, 1), gC.get(cTup, 2), gD.get(cTup, 3),
        gE.get(cTup, 4), gF.get(cTup, 5), gG.get(cTup, 6), gH.get(cTup, 7),
        gI.get(cTup, 8), gJ.get(cTup, 9), gK.get(cTup, 10), gL.get(cTup, 11),
        gM.get(cTup, 12), gN.get(cTup, 13), gO.get(cTup, 14), gP.get(cTup, 15),
        gQ.get(cTup, 16), gR.get(cTup, 17), gS.get(cTup, 18), gT.get(cTup, 19))
    // Number of cascading tuple entries this converter consumes.
    override def arity = 20
  }
+
+ implicit object Tup20Setter extends TupleSetter[Tuple20[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] {