
Sqoop migrated to the Apache Software Foundation.

This commit removes all the sources from Cloudera's GitHub repository.
Sqoop sources now reside in the Apache Subversion repository at:

https://svn.apache.org/repos/asf/incubator/sqoop/trunk/

Please see the README.txt file for more details on this transition.
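
For anyone who still needs the code after this removal, a minimal checkout of the new Apache trunk is sketched below (the local directory name is arbitrary):

    svn checkout https://svn.apache.org/repos/asf/incubator/sqoop/trunk/ sqoop-trunk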
1 parent 20afd90 commit fd58f8f83f92beecc81226403dc2cfc8722c8682 Arvind Prabhakar committed Aug 3, 2011
Showing with 16 additions and 19,812 deletions.
  1. +0 −26 .gitignore
  2. +0 −256 COMPILING.txt
  3. +0 −202 LICENSE.txt
  4. +0 −3 NOTICE.txt
  5. +16 −41 README.txt
  6. +0 −16 bin/.gitignore
  7. +0 −114 bin/configure-sqoop
  8. +0 −98 bin/sqoop
  9. +0 −147 bin/start-metastore.sh
  10. +0 −58 bin/stop-metastore.sh
  11. +0 −1,068 build.xml
  12. +0 −16 conf/.gitignore
  13. +0 −154 conf/sqoop-site-template.xml
  14. +0 −125 ivy.xml
  15. +0 −88 ivy/ivysettings.xml
  16. +0 −46 ivy/libraries.properties
  17. +0 −51 ivy/sqoop-test.xml
  18. +0 −55 ivy/sqoop.xml
  19. +0 −15 lib/.gitignore
  20. +0 −244 lib/LICENSE-ant-eclipse-1.0-jvm1.2.txt
  21. +0 −244 lib/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt
  22. BIN lib/ant-contrib-1.0b3.jar
  23. BIN lib/ant-eclipse-1.0-jvm1.2.jar
  24. BIN lib/hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.jar
  25. +0 −17 src/docs/.gitignore
  26. +0 −109 src/docs/Makefile
  27. +0 −33 src/docs/dev/SqoopDevGuide.txt
  28. +0 −331 src/docs/dev/api-reference.txt
  29. +0 −32 src/docs/dev/compiling.txt
  30. +0 −28 src/docs/dev/intro.txt
  31. +0 −55 src/docs/dev/preface.txt
  32. +0 −38 src/docs/man/codegen-args.txt
  33. +0 −52 src/docs/man/common-args.txt
  34. +0 −35 src/docs/man/hbase-args.txt
  35. +0 −37 src/docs/man/hive-args.txt
  36. +0 −83 src/docs/man/import-args.txt
  37. +0 −43 src/docs/man/input-args.txt
  38. +0 −69 src/docs/man/output-args.txt
  39. +0 −77 src/docs/man/sqoop-codegen.txt
  40. +0 −72 src/docs/man/sqoop-create-hive-table.txt
  41. +0 −57 src/docs/man/sqoop-eval.txt
  42. +0 −99 src/docs/man/sqoop-export.txt
  43. +0 −42 src/docs/man/sqoop-help.txt
  44. +0 −121 src/docs/man/sqoop-import-all-tables.txt
  45. +0 −103 src/docs/man/sqoop-import.txt
  46. +0 −94 src/docs/man/sqoop-job.txt
  47. +0 −55 src/docs/man/sqoop-list-databases.txt
  48. +0 −49 src/docs/man/sqoop-list-tables.txt
  49. +0 −77 src/docs/man/sqoop-merge.txt
  50. +0 −51 src/docs/man/sqoop-metastore.txt
  51. +0 −36 src/docs/man/sqoop-version.txt
  52. +0 −79 src/docs/man/sqoop.txt
  53. +0 −25 src/docs/sip/INDEX.txt
  54. +0 −30 src/docs/sip/README.txt
  55. +0 −134 src/docs/sip/sip-1.txt
  56. +0 −141 src/docs/sip/sip-2.txt
  57. +0 −368 src/docs/sip/sip-3.txt
  58. +0 −98 src/docs/sip/sip-4.txt
  59. +0 −189 src/docs/sip/sip-6.txt
  60. +0 −58 src/docs/user/SqoopUserGuide.txt
  61. +0 −63 src/docs/user/basics.txt
  62. +0 −33 src/docs/user/codegen-args.txt
  63. +0 −26 src/docs/user/codegen-purpose.txt
  64. +0 −78 src/docs/user/codegen.txt
  65. +0 −38 src/docs/user/common-args.txt
  66. +0 −186 src/docs/user/compatibility.txt
  67. +0 −94 src/docs/user/connecting.txt
  68. +0 −42 src/docs/user/controlling-input-format.txt
  69. +0 −38 src/docs/user/controlling-output-format.txt
  70. +0 −29 src/docs/user/create-hive-table-purpose.txt
  71. +0 −75 src/docs/user/create-hive-table.txt
  72. +0 −23 src/docs/user/eval-purpose.txt
  73. +0 −69 src/docs/user/eval.txt
  74. +0 −29 src/docs/user/export-purpose.txt
  75. +0 −249 src/docs/user/export.txt
  76. +0 −32 src/docs/user/hbase-args.txt
  77. +0 −49 src/docs/user/hbase.txt
  78. +0 −84 src/docs/user/help.txt
  79. +0 −40 src/docs/user/hive-args.txt
  80. +0 −30 src/docs/user/hive-notes.txt
  81. +0 −78 src/docs/user/hive.txt
  82. +0 −29 src/docs/user/import-all-tables-purpose.txt
  83. +0 −105 src/docs/user/import-all-tables.txt
  84. +0 −23 src/docs/user/import-purpose.txt
  85. +0 −632 src/docs/user/import.txt
  86. +0 −34 src/docs/user/input-args.txt
  87. +0 −34 src/docs/user/input-formatting-args.txt
  88. +0 −24 src/docs/user/input-formatting.txt
  89. +0 −45 src/docs/user/intro.txt
  90. +0 −27 src/docs/user/job-purpose.txt
  91. +0 −55 src/docs/user/list-databases.txt
  92. +0 −54 src/docs/user/list-tables.txt
  93. +0 −26 src/docs/user/merge-purpose.txt
  94. +0 −26 src/docs/user/metastore-purpose.txt
  95. +0 −35 src/docs/user/output-args.txt
  96. +0 −39 src/docs/user/output-formatting-args.txt
  97. +0 −44 src/docs/user/output-formatting.txt
  98. +0 −62 src/docs/user/preface.txt
  99. +0 −308 src/docs/user/saved-jobs.txt
  100. +0 −36 src/docs/user/support.txt
  101. +0 −249 src/docs/user/tools.txt
  102. +0 −48 src/docs/user/version.txt
  103. +0 −47 src/docs/web/breadcrumbs.xsl
  104. +0 −269 src/docs/web/docbook.css
  105. +0 −47 src/docs/web/footer.xsl
  106. +0 −41 src/docs/web/header.xsl
  107. +0 −84 src/docs/web/html.xsl
  108. +0 −5 src/docs/web/images/README
  109. BIN src/docs/web/images/callouts/1.png
  110. BIN src/docs/web/images/callouts/10.png
  111. BIN src/docs/web/images/callouts/11.png
  112. BIN src/docs/web/images/callouts/12.png
  113. BIN src/docs/web/images/callouts/13.png
  114. BIN src/docs/web/images/callouts/14.png
  115. BIN src/docs/web/images/callouts/15.png
  116. BIN src/docs/web/images/callouts/2.png
  117. BIN src/docs/web/images/callouts/3.png
  118. BIN src/docs/web/images/callouts/4.png
  119. BIN src/docs/web/images/callouts/5.png
  120. BIN src/docs/web/images/callouts/6.png
  121. BIN src/docs/web/images/callouts/7.png
  122. BIN src/docs/web/images/callouts/8.png
  123. BIN src/docs/web/images/callouts/9.png
  124. BIN src/docs/web/images/caution.png
  125. BIN src/docs/web/images/example.png
  126. BIN src/docs/web/images/home.png
  127. BIN src/docs/web/images/important.png
  128. BIN src/docs/web/images/next.png
  129. BIN src/docs/web/images/note.png
  130. BIN src/docs/web/images/prev.png
  131. BIN src/docs/web/images/tip.png
  132. BIN src/docs/web/images/up.png
  133. BIN src/docs/web/images/warning.png
  134. +0 −30 src/docs/web/index.txt
  135. +0 −232 src/java/com/cloudera/sqoop/ConnFactory.java
  136. +0 −241 src/java/com/cloudera/sqoop/Sqoop.java
  137. +0 −1,841 src/java/com/cloudera/sqoop/SqoopOptions.java
  138. +0 −49 src/java/com/cloudera/sqoop/cli/RelatedOptions.java
  139. +0 −118 src/java/com/cloudera/sqoop/cli/SqoopParser.java
  140. +0 −151 src/java/com/cloudera/sqoop/cli/ToolOptions.java
  141. +0 −83 src/java/com/cloudera/sqoop/config/ConfigurationConstants.java
  142. +0 −171 src/java/com/cloudera/sqoop/config/ConfigurationHelper.java
  143. +0 −133 src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java
  144. +0 −51 src/java/com/cloudera/sqoop/hbase/HBaseUtil.java
  145. +0 −75 src/java/com/cloudera/sqoop/hbase/PutTransformer.java
  146. +0 −100 src/java/com/cloudera/sqoop/hbase/ToStringPutTransformer.java
  147. +0 −330 src/java/com/cloudera/sqoop/hive/HiveImport.java
  148. +0 −100 src/java/com/cloudera/sqoop/hive/HiveTypes.java
  149. +0 −253 src/java/com/cloudera/sqoop/hive/TableDefWriter.java
  150. +0 −167 src/java/com/cloudera/sqoop/io/CodecMap.java
  151. +0 −89 src/java/com/cloudera/sqoop/io/FixedLengthInputStream.java
  152. +0 −1,816 src/java/com/cloudera/sqoop/io/LobFile.java
  153. +0 −146 src/java/com/cloudera/sqoop/io/LobReaderCache.java
  154. +0 −80 src/java/com/cloudera/sqoop/io/NamedFifo.java
  155. +0 −73 src/java/com/cloudera/sqoop/io/SplittableBufferedWriter.java
  156. +0 −161 src/java/com/cloudera/sqoop/io/SplittingOutputStream.java
  157. +0 −38 src/java/com/cloudera/sqoop/io/UnsupportedCodecException.java
  158. +0 −83 src/java/com/cloudera/sqoop/lib/BigDecimalSerializer.java
  159. +0 −131 src/java/com/cloudera/sqoop/lib/BlobRef.java
  160. +0 −43 src/java/com/cloudera/sqoop/lib/BooleanParser.java
  161. +0 −111 src/java/com/cloudera/sqoop/lib/ClobRef.java
  162. +0 −233 src/java/com/cloudera/sqoop/lib/DelimiterSet.java
  163. +0 −126 src/java/com/cloudera/sqoop/lib/FieldFormatter.java
  164. +0 −38 src/java/com/cloudera/sqoop/lib/FieldMapProcessor.java
  165. +0 −36 src/java/com/cloudera/sqoop/lib/FieldMappable.java
  166. +0 −254 src/java/com/cloudera/sqoop/lib/JdbcWritableBridge.java
  167. +0 −318 src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java
  168. +0 −327 src/java/com/cloudera/sqoop/lib/LobRef.java
  169. +0 −53 src/java/com/cloudera/sqoop/lib/LobSerializer.java
  170. +0 −48 src/java/com/cloudera/sqoop/lib/ProcessingException.java
  171. +0 −358 src/java/com/cloudera/sqoop/lib/RecordParser.java
  172. +0 −150 src/java/com/cloudera/sqoop/lib/SqoopRecord.java
  173. +0 −291 src/java/com/cloudera/sqoop/manager/ConnManager.java
  174. +0 −127 src/java/com/cloudera/sqoop/manager/DefaultManagerFactory.java
  175. +0 −111 src/java/com/cloudera/sqoop/manager/DirectMySQLManager.java
  176. +0 −474 src/java/com/cloudera/sqoop/manager/DirectPostgresqlManager.java
  177. +0 −75 src/java/com/cloudera/sqoop/manager/ExportJobContext.java
Sorry, we could not display the entire diff because too many files (396) changed.
26 .gitignore
@@ -1,26 +0,0 @@
-# Licensed to Cloudera, Inc. under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-/build
-*~
-.classpath
-.project
-.launches
-.settings
-.ant-targets-build.xml
-/tags
-/build.properties
-/conf/managers.d
-/conf/tools.d
-
256 COMPILING.txt
@@ -1,256 +0,0 @@
-
-= Compiling
-
-This document explains how to compile Sqoop.
-
-////
- Licensed to Cloudera, Inc. under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-////
-
-
-== Build Dependencies
-
-Compiling Sqoop requires the following tools:
-
-* Apache ant (1.7.1)
-* Java JDK 1.6
-
-Additionally, building the documentation requires these tools:
-
-* asciidoc
-* make
-* python 2.5+
-* xmlto
-* tar
-* gzip
-
-Furthermore, Sqoop's build can be instrumented with the following:
-
-* findbugs (1.3.9) for code quality checks
-* cobertura (1.9.4.1) for code coverage
-* checkstyle (5.x) for code style checks
-
-== The Basics
-
-Sqoop is compiled with ant. Type +ant -p+ to see the list of available targets.
-
-Type +ant+ to compile all java sources. You can then run Sqoop with +bin/sqoop+.
-
-If you want to build everything (including the documentation), type
-+ant package+. This will appear in the
-+build/sqoop-(version)/+ directory.
-
-Sqoop is built against the latest Hadoop distribution available from Cloudera.
-These dependencies are obtained via IVY which downloads the necessary binaries
-from Cloudera maven repository.
-
-== Testing Sqoop
-
-Sqoop has several unit tests which can be run with +ant test+. This command
-will run all the "basic" checks against an in-memory database, HSQLDB.
-
-Sqoop also has compatibility tests that check its ability to work with
-several third-party databases. To enable these tests, you will need to install
-and configure the databases, and download the JDBC drivers for each one.
-
-=== MySQL
-
-Install MySQL server and client 5.0. Download MySQL Connector/J 5.0.8 for
-JDBC. Instructions for configuring the MySQL database are in MySQLAuthTest
-and DirectMySQLTest.
-
-Use the system property sqoop.test.mysql.connectstring.host_url to specify the
-URL for the MySQL host used for testing. Specify this property on the command
-line or via the build.properties file. For example:
-
-sqoop.test.mysql.connectstring.host_url=jdbc:mysql://host.example.com/
-
-If not specified, the default value used for this property is:
-jdbc:mysql://localhost/
-
-=== Oracle
-
-Install Oracle XE (Express edition) 10.2.0. Instructions for configuring the
-database are in OracleManagerTest. Download the ojdbc6_g jar.
-
-Use the system property sqoop.test.oracle.connectstring to specify the
-connection string for Oracle host used for testing. Specify this property on the
-command line or via the build.properties file. For example:
-
-sqoop.test.oracle.connectstring=jdbc:oracle:thin:@//host.example.com/xe
-
-If not specified, the default value used for this property is:
-jdbc:oracle:thin:@//localhost/xe
-
-=== PostgreSQL
-
-Install PostgreSQL 8.3.9. Download the postgresql 8.4 jdbc driver. Instructions
-for configuring the database are in PostgresqlTest.
-
-Use the system property sqoop.test.postgresql.connectstring.host_url to specify
-the URL for the PostgreSQL host used for testing. Specify this property on the
-command line or via the build.properties file. For example:
-
-sqoop.test.postgresql.connectstring.host_url=jdbc:postgresql://sqoop-dbs.sf.cloudera.com/
-
-If not specified, the default value used for this property is:
-jdbc:postgresql://localhost/
-
-=== SQL Server
-
-Install SQL Server Express 2008 R2 and create a database instance and
-download the appropriate JDBC driver. Instructions for configuring the
-database can be found in SQLServerManagerImportManualTest.
-
-Use the system property sqoop.test.sqlserver.connectstring.host_url to specify
-the URL for the SQL Server host used for testing. Specify this property on the
-command line or via the build.properties file. For example:
-
-sqoop.test.sqlserver.connectstring.host_url=jdbc:sqlserver://sqlserverhost:1433
-
-If not specified, the default value used for this property is:
-jdbc:sqlserver://sqlserverhost:1433
-
-This can be useful if you have the hostname sqlserverhost mapped to the IP
-address of the SQL Server instance.
-
-=== Running the Third-party Tests
-
-After the third-party databases are installed and configured, run:
-
-++++
-ant test -Dthirdparty=true -Dsqoop.thirdparty.lib.dir=/path/to/jdbc/drivers/
-++++
-
-This command will run all thirdparty tests except for the SQL Server test.
-To run the SQL Server test, specify the property "manual" instead of "thirdparty"
-as follows:
-
-++++
-ant test -Dmanual=true -Dsqoop.thirdparty.lib.dir=/path/to/jdbc/drivers/
-++++
-
-Note that +sqoop.thirdparty.lib.dir+ can also be specified in
-+build.properties+.
-
-== Code Quality Analysis
-
-We have three tools which can be used to analyze Sqoop's code quality.
-
-=== Findbugs
-
-Findbugs detects common errors in programming. New patches should not
-trigger additional warnings in Findbugs.
-
-Install findbugs (1.3.9) according to its instructions. To use it,
-run:
-
-++++
-ant findbugs -Dfindbugs.home=/path/to/findbugs/
-++++
-
-A report will be generated in +build/findbugs/+
-
-=== Cobertura
-
-Cobertura runs code coverage checks. It instruments the build and
-checks that each line and conditional expression is evaluated along
-all possible paths.
-
-Install Cobertura according to its instructions. Then run a test with:
-
-++++
-ant clean
-ant cobertura -Dcobertura.home=/path/to/cobertura
-ant cobertura -Dcobertura.home=/path/to/cobertura \
- -Dthirdparty=true -Dsqoop.thirdparty.lib.dir=/path/to/thirdparty
-++++
-
-(You'll need to run the cobertura target twice; once against the regular
-test targets, and once against the thirdparty targets.)
-
-When complete, the report will be placed in +build/cobertura/+
-
-New patches should come with sufficient tests for their functionality
-as well as their error recovery code paths. Cobertura can help assess
-whether your tests are thorough enough, or where gaps lie.
-
-=== Checkstyle
-
-Checkstyle enforces our style guide. There are currently a very small
-number of violations of this style in the source code, but hopefully this
-will remain the case. New code should not trigger additional checkstyle
-warnings.
-
-Checkstyle does not need to be installed manually; it will be retrieved via
-Ivy when necessary.
-
-To run checkstyle, execute:
-
-++++
-ant checkstyle
-++++
-
-A report will be generated as +build/checkstyle-errors.html+
-
-
-== Deploying to Maven
-
-To use Sqoop as a dependency in other projects, you can pull Sqoop into your
-dependency management system through Maven.
-
-To install Sqoop in your local +.m2+ cache, run:
-
-++++
-ant mvn-install
-++++
-
-This will install a pom and the Sqoop jar.
-
-To deploy Sqoop to a public repository, use:
-
-++++
-ant mvn-deploy
-++++
-
-By default, this deploys to repository.cloudera.com. You can choose
-the complete URL to deploy to with the +mvn.deploy.url+ property.
-By default, this deploys to the "snapshots" repository. To deploy to
-"staging" or "releases" on repository.cloudera.com, set the
-+mvn.repo+ property accordingly.
-
-== Releasing Sqoop
-
-To build a full release of Sqoop, run +ant release -Dversion=(somever)+.
-This will build a binary release tarball and the web-based documentation
-as well as run a release audit which flags any source files which may
-be missing license headers.
-
-(The release audit can be run standalone with the +ant releaseaudit+
-target.)
-
-You must set the +version+ property explicitly; you cannot release a
-snapshot. To simultaneously deploy this to a maven repository, include
-the +mvn-install+ or +mvn-deploy+ targets as well.
-
-
-== Using Eclipse
-
-Running +ant eclipse+ will generate +.project+ and +.classpath+ files that
-will allow you to edit Sqoop sources in Eclipse with all the library
-dependencies correctly resolved. To compile the jars, you should still
-use ant.
-
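
For reference alongside the removed COMPILING.txt above: the third-party tests it describes are all driven by Ant system properties, so a single command line can both enable them and point at a non-default database host. The sketch below combines the properties named in that file; the JDBC driver path and MySQL host URL are placeholders, not values from this repository:

    ant test -Dthirdparty=true \
        -Dsqoop.thirdparty.lib.dir=/path/to/jdbc/drivers/ \
        -Dsqoop.test.mysql.connectstring.host_url=jdbc:mysql://db.example.com/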
202 LICENSE.txt
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
3 NOTICE.txt
@@ -1,3 +0,0 @@
-This product includes software developed by Cloudera, Inc.
-(http://www.cloudera.com/).
-
57 README.txt
@@ -1,46 +1,21 @@
+# Sqoop has moved to Apache!
-= Welcome to Sqoop!
+The Sqoop team is pleased to announce that Sqoop has been migrated
+to Apache Software Foundation and is currently undergoing incubation.
-This is the Sqoop (SQL-to-Hadoop) tool. Sqoop allows easy imports and
-exports of data sets between databases and HDFS.
+All the sources have been migrated to Apache Subversion and can be
+accessed from there, including relevant history of individual sources.
+If you are interested in a past release of Sqoop, you can clone this
+repository and go back to the specific release tag.
+***
+# Apache Sqoop
-== More Documentation
-
-Sqoop ships with additional documentation: a user guide and a manual page.
-
-Asciidoc sources for both of these are in +src/docs/+. Run +ant docs+ to build
-the documentation. It will be created in +build/docs/+.
-
-If you got Sqoop in release form, documentation will already be built and
-available in the +docs/+ directory.
-
-
-== Compiling Sqoop
-
-Compiling Sqoop requires the following tools:
-
-* Apache ant (1.7.1)
-* Java JDK 1.6
-
-Additionally, building the documentation requires these tools:
-
-* asciidoc
-* make
-* python 2.5+
-* xmlto
-* tar
-* gzip
-
-To compile Sqoop, run +ant package+. There will be a fully self-hosted build
-provided in the +build/sqoop-(version)/+ directory.
-
-You can build just the jar by running +ant jar+.
-
-See the COMPILING.txt document for more information.
-
-== This is also an Asciidoc file!
-
-* Try running +asciidoc README.txt+
-* For more information about asciidoc, see http://www.methods.co.nz/asciidoc/
+Please read the [transition notes](https://cwiki.apache.org/confluence/display/SQOOP/Transition+from+Cloudera)
+for important information regarding the transition of Sqoop to Apache Software Foundation.
+* [Project Status](http://incubator.apache.org/projects/sqoop.html)
+* [Sqoop Wiki](https://cwiki.apache.org/confluence/display/SQOOP/Home)
+* [Source Repository](https://svn.apache.org/repos/asf/incubator/sqoop/trunk/)
+* [Issue Tracking System](https://issues.apache.org/jira/browse/SQOOP)
+* [New Mailing Lists](https://cwiki.apache.org/confluence/display/SQOOP/Mailing+Lists)
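
As the new README above notes, past releases remain reachable from this repository's history. A minimal sketch of that workflow follows; the repository URL and tag name are placeholders, and `git tag` lists the real release tags:

    git clone <this-repository-url> sqoop
    cd sqoop
    git tag                      # list the available release tags
    git checkout tags/<release-tag>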
16 bin/.gitignore
@@ -1,16 +0,0 @@
-# Licensed to Cloudera, Inc. under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-/sqoop-*
-
114 bin/configure-sqoop
@@ -1,114 +0,0 @@
-#!/bin/bash
-#
-# Licensed to Cloudera, Inc. under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is sourced in by bin/sqoop to set environment variables prior to
-# invoking Hadoop.
-
-bin="$1"
-
-if [ -z "${bin}" ]; then
- bin=`dirname $0`
- bin=`cd ${bin} && pwd`
-fi
-
-if [ -z "$SQOOP_HOME" ]; then
- export SQOOP_HOME=${bin}/..
-fi
-
-# Find paths to our dependency systems. If they are unset, use CDH defaults.
-
-if [ -z "${HADOOP_HOME}" ]; then
- HADOOP_HOME=/usr/lib/hadoop
-fi
-if [ -z "${HBASE_HOME}" ]; then
- HBASE_HOME=/usr/lib/hbase
-fi
-
-# Check: If we can't find our dependencies, give up here.
-if [ ! -d "${HADOOP_HOME}" ]; then
- echo "Error: $HADOOP_HOME does not exist!"
- echo 'Please set $HADOOP_HOME to the root of your Hadoop installation.'
- exit 1
-fi
-
-## Moved to be a runtime check in sqoop.
-if [ ! -d "${HBASE_HOME}" ]; then
- echo "Warning: $HBASE_HOME does not exist! HBase imports will fail."
- echo 'Please set $HBASE_HOME to the root of your HBase installation.'
-fi
-
-# Where to find the main Sqoop jar
-SQOOP_JAR_DIR=$SQOOP_HOME
-
-# If there's a "build" subdir, override with this, so we use
-# the newly-compiled copy.
-if [ -d "$SQOOP_JAR_DIR/build" ]; then
- SQOOP_JAR_DIR="${SQOOP_JAR_DIR}/build"
-fi
-
-function add_to_classpath() {
- dir=$1
- for f in $dir/*.jar; do
- SQOOP_CLASSPATH=${SQOOP_CLASSPATH}:$f;
- done
-
- export SQOOP_CLASSPATH
-}
-
-# Add sqoop dependencies to classpath.
-SQOOP_CLASSPATH=""
-if [ -d "$SQOOP_HOME/lib" ]; then
- add_to_classpath $SQOOP_HOME/lib
-fi
-
-# Add HBase to dependency list
-if [ -e "$HBASE_HOME/bin/hbase" ]; then
- TMP_SQOOP_CLASSPATH=${SQOOP_CLASSPATH}:`$HBASE_HOME/bin/hbase classpath`
- SQOOP_CLASSPATH=${TMP_SQOOP_CLASSPATH}
-fi
-
-ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper}
-if [ -d "${ZOOCFGDIR}" ]; then
- SQOOP_CLASSPATH=$ZOOCFGDIR:$SQOOP_CLASSPATH
-fi
-
-SQOOP_CONF_DIR=${SQOOP_CONF_DIR:-${SQOOP_HOME}/conf}
-SQOOP_CLASSPATH=${SQOOP_CONF_DIR}:${SQOOP_CLASSPATH}
-
-# If there's a build subdir, use Ivy-retrieved dependencies too.
-if [ -d "$SQOOP_HOME/build/ivy/lib/sqoop" ]; then
- for f in $SQOOP_HOME/build/ivy/lib/sqoop/*/*.jar; do
- SQOOP_CLASSPATH=${SQOOP_CLASSPATH}:$f;
- done
-fi
-
-add_to_classpath ${SQOOP_JAR_DIR}
-
-HADOOP_CLASSPATH="${SQOOP_CLASSPATH}:${HADOOP_CLASSPATH}"
-if [ ! -z "$SQOOP_USER_CLASSPATH" ]; then
- # User has elements to prepend to the classpath, forcibly overriding
- # Sqoop's own lib directories.
- export HADOOP_CLASSPATH="${SQOOP_USER_CLASSPATH}:${HADOOP_CLASSPATH}"
-fi
-
-export SQOOP_CLASSPATH
-export SQOOP_CONF_DIR
-export SQOOP_JAR_DIR
-export HADOOP_CLASSPATH
-export HADOOP_HOME
-export HBASE_HOME
-
98 bin/sqoop
@@ -1,98 +0,0 @@
-#!/bin/bash
-#
-# Licensed to Cloudera, Inc. under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-follow_one() {
- # Resolve symlinks and relative path components along a path. This requires
- # its argument to be an absolute path. This does not recursively re-resolve
- # symlinks; if that is required, use the 'follow' method.
-
- target=$1
- OIFS=$IFS
- IFS='/'
-
- # Taking each dir component along the way, build up a new target directory,
- # resolving '.', '..', and symlinks.
- newtarget=''
- for part in ${target}; do
- if [ -z "${part}" ]; then
- continue # Empty dir part. 'foo//bar'
- elif [ "." == "${part}" ]; then
- continue # Nothing special to do for '.'
- elif [ ".." == "${part}" ]; then
- IFS=$OIFS
- newtarget=`dirname ${newtarget}` # pop a component.
- elif [ -h "${newtarget}/${part}" ]; then
- IFS=$OIFS
- link=`readlink ${newtarget}/${part}`
- # links can be relative or absolute. Relative ones get appended to
- # newtarget; absolute ones replace it.
- if [ "${link:0:1}" != "/" ]; then
- newtarget="${newtarget}/${link}" # relative
- else
- newtarget="${link}" # absolute
- fi
- else # Regular file component.
- newtarget="${newtarget}/${part}"
- fi
- IFS='/'
- done
-
- IFS=$OIFS
- echo $newtarget
-}
-
-follow() {
- # Portable 'readlink -f' function to follow a file's links to the final
- # target. Calls follow_one recursively til we're finished tracing symlinks.
-
- target=$1
- depth=$2
-
- if [ -z "$depth" ]; then
- depth=0
- elif [ "$depth" == "1000" ]; then
- # Don't recurse indefinitely; we've probably hit a symlink cycle.
- # Just bail out here.
- echo $target
- return 1
- fi
-
- # Canonicalize the target to be an absolute path.
- targetdir=`dirname ${target}`
- targetdir=`cd ${targetdir} && pwd`
- target=${targetdir}/`basename ${target}`
-
- # Use follow_one to resolve links. Test that we get the same result twice,
- # to terminate iteration.
- first=`follow_one ${target}`
- second=`follow_one ${first}`
- if [ "${first}" == "${second}" ]; then
- # We're done.
- echo "${second}"
- else
- # Need to continue resolving links.
- echo `follow ${second} $(( $depth + 1 ))`
- fi
-}
-
-prgm=`follow $0`
-bin=`dirname ${prgm}`
-bin=`cd ${bin} && pwd`
-
-source ${bin}/configure-sqoop "${bin}"
-exec ${HADOOP_HOME}/bin/hadoop com.cloudera.sqoop.Sqoop "$@"
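
The removed bin/sqoop above hand-rolls symlink resolution in its follow/follow_one functions because a portable `readlink -f` cannot be assumed on every platform. Where GNU coreutils is available, the same canonicalization can be done directly; a sketch, not part of the original script:

    prgm=$(readlink -f "$0")                  # resolve symlinks to the real script path
    bin=$(cd "$(dirname "$prgm")" && pwd)     # absolute directory containing the script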
147 bin/start-metastore.sh
@@ -1,147 +0,0 @@
-#!/bin/bash
-#
-# Licensed to Cloudera, Inc. under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This script starts a metastore instance.
-# Usage: start-metastore.sh -p pidfilename -l logdir
-
-prgm=$0
-bin=`dirname $prgm`
-
-while [ ! -z "$1" ]; do
- if [ "$1" == "-p" ]; then
- shift
- pidfilename=$1
- shift
- elif [ "$1" == "-l" ]; then
- shift
- logdir=$1
- shift
- else
- echo "Unknown argument $1"
- exit 1
- fi
-done
-
-# Verify our arguments exist.
-
-if [ -z "${pidfilename}" ]; then
- echo "Missing argument: -p pidfilename"
- exit 1
-fi
-
-if [ -z "${logdir}" ]; then
- echo "Missing argument: -l logdir"
- exit 1
-fi
-
-if [ ! -d "${logdir}" ]; then
- echo "Warning: Log directory ${logdir} does not exist."
-fi
-
-function pid_file_alive() {
- local pidfile=$1 # IN
- local programname=$2 # IN
- local checkpid=`cat "$pidfile"`
- ps -fp $checkpid | grep $checkpid | grep "$programname" > /dev/null 2>&1
-}
-
-function fail_if_pid_exists() {
- local pidfile=$1 # IN
- local programname=$2 # IN
- if pid_file_alive "$pidfile" "$programname" ; then
- echo "Pid file $pidfile already exists; not starting metastore."
- exit 1
- fi
-}
-
-# Acquire the pidfile lock.
-if [ -e "$pidfilename" ]; then
- # If the pid file exists, check to see if the process is alive.
- # We first write our own (bash script) pid into the pidfile.
- # Then we write the child pid over top; the bash script then terminates.
- # So we must be prepared to accept either case.
-
- # We must check for bash first. Serialization matters.
- fail_if_pid_exists "$pidfilename" "bash"
- fail_if_pid_exists "$pidfilename" "sqoop"
-
- # We're good to go. Remove the existing pidfile.
- existingpid=`cat $pidfilename`
- [[ -e "$pidfilename" ]] && rm "$pidfilename"
- [[ -e "$pidfilename.$existingpid" ]] && rm "$pidfilename.$existingpid"
-fi
-
-pid=$$
-echo $pid > "$pidfilename.$pid"
-if [ ! -e "$pidfilename.$pid" ]; then
- echo "Could not create pid file $pidfilename.$pid; not starting metastore."
- exit 1
-fi
-
-# Hardlink the "real" pidfile to our temporary one. This is atomic.
-ln "$pidfilename.$pid" "$pidfilename"
-
-# Verify that the real pidfile exists, and contains our current pid.
-if [ ! -e "$pidfilename" ]; then
- echo "Could not create pid file $pidfilename; not starting metastore."
- exit 1
-fi
-
-val=`cat "$pidfilename"`
-if [ "$val" != "$pid" ]; then
- # We lost the pid file race.
- echo "Metastore already started; not starting metastore."
- exit 1
-fi
-
-# Determine the log file name.
-user=`id -un`
-host=`hostname`
-
-# Log file name we would like to use.
-logfile="$logdir/sqoop-metastore-$user-$host.log"
-touch $logfile >/dev/null 2>&1
-if [ "$?" != "0" ]; then
- # Can't open for logging.
- echo "Warning: Cannot write to log directory. Disabling metastore log."
- logfile=/dev/null
-fi
-
-# Actually start the metastore.
-
-if [ ! -z "$bin" ]; then
- bin="$bin/"
-fi
-
-nohup "$bin/sqoop" metastore > "$logfile" 2>&1 </dev/null &
-ret=$?
-realpid=$!
-
-if [ "$ret" != "0" ]; then
- echo "Error starting metastore."
- rm "$pidfilename"
- rm "$pidfilename.$pid"
- exit $ret
-fi
-
-# Now replace the pid in the pidfile with the value in $realpid.
-echo $realpid > "$pidfilename"
-
-# The original pid file with the extension is no longer necessary.
-rm "$pidfilename.$pid"
-
-
58 bin/stop-metastore.sh
@@ -1,58 +0,0 @@
-#!/bin/bash
-#
-# Licensed to Cloudera, Inc. under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This script stops a metastore instance.
-# Usage: stop-metastore.sh -p pidfilename
-
-prgm=$0
-bin=`dirname $prgm`
-
-while [ ! -z "$1" ]; do
- if [ "$1" == "-p" ]; then
- shift
- pidfilename=$1
- shift
- else
- echo "Unknown argument $1"
- exit 1
- fi
-done
-
-# Verify our arguments exist.
-
-if [ -z "${pidfilename}" ]; then
- echo "Missing argument: -p pidfilename"
- exit 1
-fi
-
-# Shut down any running metastore.
-
-if [ ! -z "$bin" ]; then
- bin="$bin/"
-fi
-
-HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-ERROR,console} \
- "$bin/sqoop" metastore --shutdown 2>&1 >/dev/null
-ret=$?
-if [ "$ret" != "0" ]; then
- echo "Could not shut down metastore."
- exit $ret
-fi
-
-# Remove the pidfile lock.
-
-rm -f "$pidfilename"
1,068 build.xml
@@ -1,1068 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
- Licensed to Cloudera, Inc. under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- Cloudera, Inc. licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project name="sqoop" default="jar-all"
- xmlns:artifact="urn:maven-artifact-ant"
- xmlns:ivy="antlib:org.apache.ivy.ant">
-
- <!-- Load system-wide and project-wide default properties set by
- the user, to avoid needing to override with -D. -->
- <property file="${user.home}/build.properties" />
- <property file="${basedir}/build.properties" />
-
- <!-- some basic properties -->
- <property environment="env"/>
- <property name="name" value="sqoop" />
- <property name="Name" value="Sqoop" />
- <property name="version" value="1.4.0-SNAPSHOT" />
-
- <!-- The last version released. -->
- <property name="oldversion" value="1.3.0" />
- <!-- The point when we branched for the previous release. -->
- <property name="prev.git.hash"
- value="166417fdb0afd3141c6fd0fdc50043b5e13c2a97" />
-
- <property name="artifact.name" value="${name}-${version}" />
- <property name="dest.jar" value="${artifact.name}.jar" />
- <property name="test.jar" value="${name}-test-${version}.jar" />
- <property name="git.hash" value="" />
-
- <!-- programs used -->
- <property name="python" value="python" />
-
- <!-- locations in the source tree -->
- <property name="base.src.dir" location="${basedir}/src" />
- <property name="src.dir" location="${base.src.dir}/java" />
- <property name="test.dir" location="${base.src.dir}/test" />
- <property name="perftest.src.dir" location="${base.src.dir}/perftest" />
- <property name="lib.dir" location="${basedir}/lib" />
- <property name="docs.src.dir" location="${base.src.dir}/docs" />
- <property name="script.src.dir" location="${base.src.dir}/scripts" />
-
- <!-- base directory for all build/test process output -->
- <property name="build.dir" location="${basedir}/build" />
-
- <!-- generated bin scripts -->
- <property name="build.bin.dir" location="${build.dir}/bin" />
-
- <!-- generated source code -->
- <property name="build.src.dir" location="${build.dir}/src" />
-
- <!-- staging area for *-sources.jar files -->
- <property name="build.srcjar.dir" location="${build.dir}/srcjars" />
-
- <!-- compiled classes for the main sqoop artifact. -->
- <property name="build.classes" location="${build.dir}/classes"/>
-
- <!-- root directory for output/intermediate data for testing -->
- <property name="build.test" location="${build.dir}/test"/>
- <property name="test.log.dir" location="${build.dir}/test/logs"/>
-
- <!-- compiled test classes -->
- <property name="build.test.classes" location="${build.test}/classes" />
-
- <!-- compiled "perftest" programs -->
- <property name="build.perftest" location="${build.dir}/perftest"/>
- <property name="build.perftest.classes" location="${build.perftest}/classes"/>
-
- <!-- generated documentation output directory -->
- <property name="build.javadoc" location="${build.dir}/docs/api" />
-
- <!-- Target dir for release notes file. -->
- <property name="build.relnotes.dir" location="${build.dir}/docs" />
- <property name="relnotes.filename"
- location="${build.relnotes.dir}/sqoop-${version}.releasenotes.html" />
-
- <property name="dist.dir" location="${build.dir}/${artifact.name}" />
- <property name="tar.file" location="${build.dir}/${artifact.name}.tar.gz" />
- <property name="build.docs.timestamp"
- location="${build.dir}/docs.timestamp" />
-
- <!-- compilation -->
- <property name="javac.deprecation" value="off"/>
- <property name="javac.debug" value="on"/>
- <property name="build.encoding" value="ISO-8859-1"/>
-
- <!-- testing with JUnit -->
- <property name="test.junit.output.format" value="plain"/>
- <property name="test.output" value="no"/>
- <property name="test.timeout" value="300000"/>
-
- <!-- static analysis -->
- <property name="findbugs.out.dir" value="${build.dir}/findbugs" />
- <property name="findbugs.output.xml.file"
- value="${findbugs.out.dir}/report.xml" />
- <property name="findbugs.output.html.file"
- value="${findbugs.out.dir}/report.html" />
- <property name="findbugs.excludes"
- location="${test.dir}/findbugsExcludeFile.xml" />
-
- <!-- maven -->
- <property name="mvn.build.dir" value="${build.dir}/m2" />
- <property name="mvn.repo" value="snapshots" />
- <property name="mvn.repo.id" value="cloudera.${mvn.repo}.repo" />
- <property name="mvn.deploy.url"
- value="https://repository.cloudera.com/content/repositories/${mvn.repo}"/>
- <property name="sqoop.pom" value="${mvn.build.dir}/sqoop.pom" />
- <property name="sqooptest.pom" value="${mvn.build.dir}/sqoop-test.pom" />
-
- <!-- code coverage -->
- <property name="cobertura.dir" value="${build.dir}/cobertura" />
- <property name="cobertura.home" value="${cobertura.dir}" />
- <property name="cobertura.report.dir" value="${cobertura.dir}/report" />
- <property name="cobertura.format" value="html" /> <!-- may be 'xml' -->
- <property name="cobertura.class.dir" value="${cobertura.dir}/classes" />
-
- <!-- Checking code style -->
- <property name="checkstyle.xml" value="${test.dir}/checkstyle.xml" />
- <property name="checkstyle.format.xsl"
- value="${test.dir}/checkstyle-noframes.xsl" />
- <property name="checkstyle.report.dir" value="${build.dir}" />
-
- <!-- Release audit -->
- <property name="rat.reporting.classname" value="rat.Report"/>
-
- <!-- When testing with non-free JDBC drivers, override this parameter
- to contain the path to the driver library dir.
- -->
- <property name="sqoop.thirdparty.lib.dir" value="" />
-
- <!-- Ivy-based dependency resolution -->
- <property name="ivy.dir" location="${basedir}/ivy" />
- <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml"/>
- <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
- <property name="ivy.jar" location="${lib.dir}/ivy-${ivy.version}.jar"/>
- <property name="ivy_repo_url"
- value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" />
- <property name="mvn_repo_url"
- value="http://repo2.maven.org/maven2/org/apache/maven/maven-ant-tasks/${mvn.version}/maven-ant-tasks-${mvn.version}.jar"/>
- <property name="mvn.jar"
- location="${build.dir}/maven-ant-tasks-${mvn.version}.jar" />
- <property name="build.ivy.dir" location="${build.dir}/ivy" />
- <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
- <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
- <property name="redist.ivy.lib.dir"
- location="${build.ivy.lib.dir}/${name}/redist"/>
-
- <!--this is the naming policy for artifacts we want pulled down-->
- <property name="ivy.artifact.retrieve.pattern"
- value="${name}/[conf]/[artifact]-[revision](-[classifier]).[ext]"/>
-
- <!--test related properties -->
- <property name="sqoop.test.oracle.connectstring"
- value="jdbc:oracle:thin:@//localhost/xe"/>
-
- <property name="sqoop.test.mysql.connectstring.host_url"
- value="jdbc:mysql://localhost/"/>
-
- <property name="sqoop.test.postgresql.connectstring.host_url"
- value="jdbc:postgresql://localhost/"/>
-
- <property name="sqoop.test.sqlserver.connectstring.host_url"
- value="jdbc:sqlserver://sqlserverhost:1433"/>
-
- <!-- load ant-contrib tasks to get the "if" task. -->
- <taskdef resource="net/sf/antcontrib/antcontrib.properties">
- <classpath>
- <pathelement location="${basedir}/lib/ant-contrib-1.0b3.jar"/>
- </classpath>
- </taskdef>
-
- <!-- The classpath for compiling and running Sqoop -->
- <if>
- <isset property="hadoop.home" />
- <then>
- <path id="compile.classpath">
- <pathelement location="${build.classes}"/>
- <path refid="lib.path"/>
- <fileset dir="${hadoop.home}">
- <include name="hadoop-core-*.jar" />
- <include name="hadoop-*-core.jar" />
- <include name="hadoop-common-*.jar" />
- <include name="hadoop-mapred-*.jar" />
- <include name="hadoop-hdfs-*.jar" />
- </fileset>
- <fileset dir="${hadoop.home}/lib">
- <include name="*.jar" />
- </fileset>
- <path refid="${name}.hadoop.classpath"/>
- </path>
- </then>
- <else>
- <path id="compile.classpath">
- <pathelement location="${build.classes}"/>
- <path refid="lib.path"/>
- <path refid="${name}.hadoop.classpath"/>
- </path>
- </else>
- </if>
-
- <path id="cobertura.classpath">
- <fileset dir="${cobertura.home}">
- <include name="**/*.jar" />
- </fileset>
- </path>
-
- <!-- "init" target used for setup purposes. -->
- <target name="init">
- <!-- Path containing third-party libraries deployed directly with Sqoop.
- This does not include anything that Ivy can retrieve for us.
- -->
- <path id="lib.path">
- <fileset dir="${lib.dir}">
- <include name="*.jar" />
- </fileset>
- </path>
-
- <!-- Classpath for unit tests (superset of compile.classpath) -->
- <path id="test.classpath">
- <pathelement location="${build.test.classes}" />
- <path refid="${name}.hadooptest.classpath" />
- <path refid="compile.classpath" />
- </path>
- </target>
-
- <!-- generate the version information class. -->
- <target name="gen-version" depends="init">
- <exec executable="${script.src.dir}/write-version-info.sh"
- dir="${basedir}" failonerror="true">
- <arg value="${build.dir}" />
- <arg value="${version}" />
- <arg value="${git.hash}" />
- </exec>
- </target>
-
- <!-- Compile core classes for the project -->
- <target name="compile"
- depends="init, gen-version, ivy-retrieve-hadoop"
- description="Compile core classes for the project">
- <!-- don't use an out-of-date instrumented build. -->
- <delete dir="${cobertura.class.dir}" />
- <!-- ensure normal build target dir exists -->
- <mkdir dir="${build.classes}" />
-
- <!-- Compile generated code first. -->
- <javac
- encoding="${build.encoding}"
- srcdir="${build.src.dir}"
- includes="**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- deprecation="${javac.deprecation}">
- <classpath refid="compile.classpath"/>
- </javac>
-
- <!-- Compile the main code. -->
- <javac
- encoding="${build.encoding}"
- srcdir="${src.dir}"
- includes="**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- deprecation="${javac.deprecation}">
- <classpath refid="compile.classpath"/>
- </javac>
- </target>
-
- <target name="compile-test"
- depends="compile, ivy-retrieve-hadoop-test"
- description="Compile test classes">
- <mkdir dir="${build.test.classes}" />
- <javac
- encoding="${build.encoding}"
- srcdir="${test.dir}"
- includes="**/*.java"
- destdir="${build.test.classes}"
- debug="${javac.debug}">
- <classpath>
- <path refid="test.classpath"/>
- </classpath>
- </javac>
- </target>
-
- <target name="compile-perf-test"
- depends="compile, ivy-retrieve-hadoop-test"
- description="Compile manual performance tests">
- <mkdir dir="${build.perftest.classes}" />
- <javac
- encoding="${build.encoding}"
- srcdir="${perftest.src.dir}"
- includes="**/*.java"
- destdir="${build.perftest.classes}"
- debug="${javac.debug}">
- <classpath>
- <path refid="test.classpath"/>
- </classpath>
- </javac>
- </target>
-
- <target name="jar" depends="compile" description="Create main jar">
- <jar jarfile="${build.dir}/${dest.jar}" basedir="${build.classes}" />
- </target>
-
- <target name="jar-test" depends="compile-test" description="Create test jar">
- <jar jarfile="${build.dir}/${test.jar}" basedir="${build.test.classes}" />
- </target>
-
- <!-- Ensure that all source code can be built -->
- <target name="compile-all"
- depends="compile,compile-test,compile-perf-test"
- description="Compile all sources"/>
-
- <!-- Create all jars. Note this does not include the perftests. -->
- <target name="jar-all" depends="jar,jar-test"
- description="Create all jar artifacts" />
-
- <target name="scripts" depends="jar"
- description="Create tool-specific wrapper scripts">
- <!-- Take the list of available tools from 'sqoop help' and generate
- the wrapper scripts to invoke each of these.
- -->
- <mkdir dir="${build.bin.dir}" />
- <java classname="com.cloudera.sqoop.Sqoop"
- fork="true"
- failonerror="true"
- output="${build.dir}/tools-list"
- error="/dev/null">
- <jvmarg value="-Dhadoop.security.log.file=./build/security-audit.log" />
- <arg value="help" />
- <classpath refid="compile.classpath"/>
- </java>
- <exec executable="${script.src.dir}/create-tool-scripts.sh"
- dir="${basedir}" failonerror="true">
- <arg value="${build.bin.dir}" />
- <arg value="${script.src.dir}/tool-script.sh.template" />
- <arg value="${build.dir}/tools-list" />
- </exec>
- </target>
-
- <target name="package"
- depends="jar-all,compile-all,docs,ivy-retrieve-redist,scripts"
- description="Create a redistributable package">
-
- <mkdir dir="${dist.dir}"/>
-
- <!-- copy in the build artifact -->
- <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
- <fileset dir="${build.dir}">
- <include name="${dest.jar}" />
- </fileset>
- </copy>
-
- <!-- copy the test artifact -->
- <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
- <fileset dir="${build.dir}">
- <include name="${test.jar}" />
- </fileset>
- </copy>
-
- <!-- copy in various components of the initial source layout
- so that the redistributable can bootstrap itself. -->
- <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="false">
- <fileset dir="${basedir}">
- <include name="**/*" />
- <exclude name="build/**" />
- <exclude name="lib/**" />
- <exclude name=".git/**" />
- <exclude name="tags" />
- <exclude name=".project" />
- <exclude name=".classpath" />
- <exclude name="conf/managers.d/**" />
- <exclude name="conf/tools.d/**" />
- </fileset>
- </copy>
-
- <!-- copy the dependency libraries from ivy into the output lib dir -->
- <mkdir dir="${dist.dir}/lib"/>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
- <fileset dir="${redist.ivy.lib.dir}">
- <include name="**/*.jar" />
- </fileset>
- </copy>
-
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
- <fileset dir="${lib.dir}">
- <include name="**/*" />
- <exclude name="ivy*" />
- </fileset>
- </copy>
-
- <!-- copy in documentation build artifacts -->
- <copy todir="${dist.dir}/docs" includeEmptyDirs="false" flatten="false">
- <fileset dir="${build.dir}/docs">
- <include name="**/*.html" />
- <include name="**/*.css" />
- <include name="images/**" />
- </fileset>
- </copy>
- <copy todir="${dist.dir}/docs/man" includeEmptyDirs="false" flatten="false">
- <fileset dir="${build.dir}/docs">
- <include name="**/*.gz" />
- </fileset>
- </copy>
-
- <!-- copy in auto-generated bin scripts -->
- <copy todir="${dist.dir}/bin" includeEmptyDirs="false" flatten="true">
- <fileset dir="${build.bin.dir}">
- <include name="*" />
- </fileset>
- </copy>
-
- <!-- make sure the bin scripts are executable. -->
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin" />
- </chmod>
-
- <!-- make sure any scripts named *.sh are executable. -->
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}">
- <include name="**/*.sh" />
- </fileset>
- </chmod>
-
- <!-- In the configuration directory, take the sqoop-site-template
- and copy it to sqoop-site.xml, overwriting any user-specified
- sqoop-site.xml in there.
- -->
- <copy file="${dist.dir}/conf/sqoop-site-template.xml"
- tofile="${dist.dir}/conf/sqoop-site.xml"
- overwrite="true" />
- </target>
-
- <target name="tar" depends="package" description="Create release tarball">
- <tar compression="gzip" longfile="gnu" destfile="${tar.file}">
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${artifact.name}/bin/*" />
- <exclude name="${artifact.name}/**/*.sh" />
- <include name="${artifact.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${artifact.name}/bin/*" />
- <include name="${artifact.name}/**/*.sh" />
- </tarfileset>
- </tar>
- </target>
-
- <!-- set variables that configure the actual test -->
- <target name="test-prep" depends="test-prep-normal,test-prep-thirdparty,
- test-prep-manual"/>
-
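-  <!-- Sets thirdparty_or_manual when either -Dthirdparty or -Dmanual is given,
-       so the standard test pattern is only applied otherwise. -->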
- <target name="test-eval-condition">
- <condition property="thirdparty_or_manual">
- <or>
- <isset property="thirdparty"/>
- <isset property="manual"/>
- </or>
- </condition>
- </target>
-
- <target name="test-prep-normal" unless="thirdparty_or_manual"
- depends="test-eval-condition">
- <!-- Set this to run all the "standard" tests -->
- <property name="test.pattern" value="Test*" />
- <property name="cobertura.testset" value="base" />
- </target>
-
- <target name="test-prep-thirdparty" if="thirdparty">
-    <!-- Run tests whose names *end* with "Test" instead of starting with it;
-         this runs the non-standard tests, e.g. the third-party database tests. -->
- <property name="test.pattern" value="*Test" />
- <property name="test.exclude" value="*ManualTest" />
- <property name="cobertura.testset" value="thirdparty" />
- </target>
-
- <target name="test-prep-manual" if="manual">
-    <!-- Run tests that are marked for manual execution (*ManualTest) -->
- <property name="test.pattern" value="*ManualTest" />
- <property name="cobertura.testset" value="manual" />
- </target>
-
- <!-- ================================================================== -->
- <!-- Run unit tests -->
- <!-- By default, we'll run the "normal" tests: Test*.java -->
- <!-- To run third-party tests, run with -Dthirdparty=true -->
- <!-- ================================================================== -->
- <target name="test"
- depends="compile-test,compile,test-prep,run-tests"
- description="Run unit tests" />
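-  <!-- Illustrative invocations (the class name below is hypothetical):
-         ant test                        run the standard Test* suite
-         ant test -Dthirdparty=true      run the third-party database tests
-         ant test -Dmanual=true          run the *ManualTest tests
-         ant test -Dtestcase=TestFoo     run a single named test class
-  -->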
-
- <!-- actually run the selected unit tests -->
- <target name="run-tests"
- depends="compile-test,compile,test-prep">
- <antcall target="checkfailure" inheritRefs="true" />
- </target>
-
- <target name="test-core">
- <!-- inner target only intended to be used via antcall.
- Does not define its dependencies. Should be invoked through the
- 'test' target. Does not fail the build if tests fail.
- -->
-
- <delete dir="${test.log.dir}"/>
- <mkdir dir="${test.log.dir}"/>
- <delete dir="${build.test}/data"/>
- <mkdir dir="${build.test}/data" />
- <mkdir dir="${cobertura.class.dir}" />
- <junit
- printsummary="yes" showoutput="${test.output}"
- haltonfailure="no" fork="yes" maxmemory="512m"
- errorProperty="tests.failed" failureProperty="tests.failed"
- timeout="${test.timeout}"
- dir="${build.test}/data">
-
- <!-- enable asserts in tests -->
- <jvmarg value="-ea" />
-
- <!-- uncomment this if you want to attach a debugger -->
- <!--
- <jvmarg line="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=2601" />
- -->
-
- <sysproperty key="test.build.data" value="${build.test}/data"/>
- <sysproperty key="build.test" value="${build.test}"/>
-
- <sysproperty key="net.sourceforge.cobertura.datafile"
- value="${cobertura.dir}/cobertura-${cobertura.testset}.ser" />
-
- <!-- define this property to force Sqoop to throw better exceptions on
- errors during testing, instead of printing a short message and
- exiting with status 1.
- -->
- <sysproperty key="sqoop.throwOnError" value="" />
-
- <!-- we want more log4j output when running unit tests -->
- <sysproperty key="hadoop.root.logger"
- value="DEBUG,console" />
-
- <!-- requires fork=yes for:
- relative File paths to use the specified user.dir
- classpath to use build/*.jar
- -->
- <sysproperty key="user.dir" value="${build.test}/data"/>
-
- <!-- Setting the user.dir property is actually meaningless as it
- is read-only in the Linux Sun JDK. Provide an alternate sysprop
- to specify where generated code should go.
- -->
- <sysproperty key="sqoop.src.dir" value="${build.test}/data"/>
-
- <!-- Override standalone Hadoop's working dirs to allow parallel
- execution of multiple Hudson builders
- -->
- <sysproperty key="hadoop.tmp.dir" value="${build.test}/hadoop"/>
-
- <sysproperty key="fs.default.name" value="${fs.default.name}"/>
- <sysproperty key="hadoop.test.localoutputfile"
- value="${hadoop.test.localoutputfile}"/>
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
-
- <!-- we have a mock "hive" shell instance in our testdata directory
- for testing hive integration. Set this property here to ensure
- that the unit tests pick it up.
- -->
- <sysproperty key="hive.home" value="${basedir}/testdata/hive" />
-
-    <!-- By default the Oracle tests assume an Oracle XE installation
-         with a hardcoded connection string. To override it, specify the
-         value on the command line or in the build.properties file.
-    -->
- <sysproperty key="sqoop.test.oracle.connectstring"
- value="${sqoop.test.oracle.connectstring}"/>
-
- <sysproperty key="sqoop.test.mysql.connectstring.host_url"
- value="${sqoop.test.mysql.connectstring.host_url}"/>
-
- <sysproperty key="sqoop.test.postgresql.connectstring.host_url"
- value="${sqoop.test.postgresql.connectstring.host_url}"/>
-
- <sysproperty key="sqoop.test.sqlserver.connectstring.host_url"
- value="${sqoop.test.sqlserver.connectstring.host_url}"/>
-
- <classpath>
- <!-- instrumented classes go ahead of normal classes -->
- <pathelement location="${cobertura.class.dir}" />
-
- <!-- main classpath here. -->
- <path refid="test.classpath" />
-
- <!-- need thirdparty JDBC drivers for thirdparty tests -->
- <fileset dir="${sqoop.thirdparty.lib.dir}"
- includes="*.jar" />
-
- <!-- include cobertura itself on the classpath -->
- <path refid="cobertura.classpath" />
- </classpath>
- <formatter type="${test.junit.output.format}" />
- <batchtest todir="${build.test}" unless="testcase">
- <fileset dir="${test.dir}"
- includes="**/${test.pattern}.java"
- excludes="**/${test.exclude}.java" />
- </batchtest>
- <batchtest todir="${build.test}" if="testcase">
- <fileset dir="${test.dir}" includes="**/${testcase}.java"/>
- </batchtest>
- </junit>
- </target>
-
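-  <!-- Run the release audit tool (RAT) over the packaged dist directory and
-       scan the resulting log for license-header violations. -->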
- <target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit"
- description="Audit license headers for release">
- <fail unless="rat.present"
- message="Failed to load class [${rat.reporting.classname}]." />
- <java classname="${rat.reporting.classname}" fork="true"
- output="${build.dir}/rat.log">
- <classpath refid="${name}.releaseaudit.classpath" />
- <arg value="${dist.dir}" />
- </java>
- <exec executable="${script.src.dir}/rat-violations.sh" failOnError="true">
- <arg value="${build.dir}/rat.log" />
- <arg value="${dist.dir}" />
- </exec>
- <echo message="Release audit appears okay. Full results are in " />
- <echo message="${build.dir}/rat.log" />
- </target>
-
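-  <!-- The generated docs are considered current when no .txt source under
-       src/docs is newer than the docs timestamp file. -->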
- <target name="docs-uptodate" depends="init">
- <uptodate property="docs.uptodate">
- <srcfiles dir="${basedir}/src/docs/">
- <include name="**/*.txt" />
- </srcfiles>
- <mapper type="merge" to="${build.docs.timestamp}" />
- </uptodate>
- </target>
-
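-  <!-- Refuse to roll a release from a SNAPSHOT version; a real release
-       version must be supplied with -Dversion. -->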
- <target name="checkversion">
- <if>
- <contains string="${version}" substring="SNAPSHOT" />
- <then>
- <fail message="Error: cannot release a snapshot. Set -Dversion" />
- </then>
- </if>
- </target>
-
- <target name="relnotes-uptodate" depends="init">
-    <!-- Release notes are considered up-to-date if they exist. -->
- <available property="relnotes.exists" file="${relnotes.filename}" />
-
- <!-- if we're building a snapshot release, don't make release notes. -->
- <if>
- <contains string="${version}" substring="SNAPSHOT" />
- <then>
- <property name="relnotes.exists" value="true" />
- </then>
- </if>
- </target>
-
- <target name="relnotes" depends="relnotes-uptodate" unless="relnotes.exists"
- description="Generate release notes">
- <exec executable="${python}" failonerror="yes">
- <arg value="${script.src.dir}/relnotes.py" />
- <arg value="${build.relnotes.dir}" />
- <arg value="${basedir}" />
- <arg value="${prev.git.hash}..HEAD" />
- <arg value="${version}" />
- <arg value="${oldversion}" />
- </exec>
- </target>
-
- <target name="release" depends="checkversion,tar,releaseaudit"
- description="Roll a release artifact">
- <echo message="Release complete" />
- <echo message="Binary tar: ${tar.file}" />
- <echo message="Documentation: ${build.dir}/docs" />
- <echo message="Release notes: ${relnotes.filename}" />
- <echo message="Release audit report: ${build.dir}/rat.log" />
- </target>
-
- <target name="mvn-prep" depends="init,ivy-resolve-test">
- <!-- prepare for mvn tasks. -->
-
- <!-- Download maven -->
- <mkdir dir="${build.dir}" />
- <get src="${mvn_repo_url}" dest="${mvn.jar}" usetimestamp="true" />
-
- <!-- Register mvn tasks -->
- <path id="mvn-ant-task.classpath" path="${mvn.jar}" />
- <typedef resource="org/apache/maven/artifact/ant/antlib.xml"
- uri="urn:maven-artifact-ant"
- classpathref="mvn-ant-task.classpath"/>
-
- <!-- generate our poms from our ivy files. -->
- <mkdir dir="${mvn.build.dir}" />
- <ivy:makepom ivyfile="ivy/sqoop.xml" pomfile="${sqoop.pom}"
- settingsRef="${name}.ivy.settings">
- <mapping conf="default" scope="compile" />
- <mapping conf="runtime" scope="runtime" />
- </ivy:makepom>
- <ivy:makepom ivyfile="ivy/sqoop-test.xml" pomfile="${sqooptest.pom}"
- settingsRef="${name}.ivy.settings">
- <mapping conf="default" scope="compile" />
- <mapping conf="runtime" scope="runtime" />
- </ivy:makepom>
-
- <!-- Change the version in the pom file to reflect our claimed version. -->
- <replaceregexp>
- <regexp pattern="&lt;version&gt;.*&lt;/version&gt;" />
- <substitution expression="&lt;version&gt;${version}&lt;/version&gt;" />
- <fileset dir="${mvn.build.dir}">
- <include name="*.pom" />
- </fileset>
- </replaceregexp>
- </target>
-
- <target name="srcjars" depends="init,jar-all"
- description="Create source jars">
- <mkdir dir="${build.srcjar.dir}" />
- <jar jarfile="${build.srcjar.dir}/${artifact.name}-sources.jar">
- <fileset dir="${src.dir}" />
- <fileset dir="${build.src.dir}" />
- </jar>
- <jar jarfile="${build.srcjar.dir}/${name}-test-${version}-sources.jar">
- <fileset dir="${test.dir}" />
- </jar>
- </target>
-
- <target name="mvn-install" depends="init,mvn-prep,jar-all,srcjars"
- description="Install Sqoop in local m2 repository">
- <artifact:pom file="${sqoop.pom}" id="sqoop" />
- <artifact:install file="${build.dir}/${dest.jar}">
- <pom refid="sqoop" />
- <attach file="${build.srcjar.dir}/${artifact.name}-sources.jar"
- classifier="sources" />
- </artifact:install>
-
- <artifact:pom file="${sqooptest.pom}" id="sqoop-test" />
- <artifact:install file="${build.dir}/${test.jar}">
- <pom refid="sqoop-test" />
- <attach file="${build.srcjar.dir}/${name}-test-${version}-sources.jar"
- classifier="sources" />
- </artifact:install>
- </target>
-
- <target name="mvn-deploy" depends="init,mvn-prep,jar-all,srcjars"
- description="Deploy Sqoop to public maven repository">
- <artifact:pom file="${sqoop.pom}" id="sqoop" />
- <artifact:deploy file="${build.dir}/${dest.jar}">
- <remoteRepository id="${mvn.repo.id}" url="${mvn.deploy.url}"/>
- <pom refid="sqoop" />
- <attach file="${build.srcjar.dir}/${artifact.name}-sources.jar"
- classifier="sources" />
- </artifact:deploy>
-
- <artifact:pom file="${sqooptest.pom}" id="sqoop-test" />
- <artifact:deploy file="${build.dir}/${test.jar}">
- <remoteRepository id="${mvn.repo.id}" url="${mvn.deploy.url}"/>
- <pom refid="sqoop-test" />
- <attach file="${build.srcjar.dir}/${name}-test-${version}-sources.jar"
- classifier="sources" />
- </artifact:deploy>
-
- </target>
-
- <target name="docs" depends="real-docs,relnotes,javadoc"
- description="Build documentation"/>
-
- <target name="real-docs" depends="docs-uptodate,init" unless="docs.uptodate">
- <exec executable="make" failonerror="true">
- <arg value="-C" />
- <arg value="${basedir}/src/docs" />
- <arg value="BUILDROOT=${build.dir}" />
- <arg value="VERSION=${version}" />
- </exec>
- <touch file="${build.docs.timestamp}" />
- </target>
-
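-  <!-- Javadoc is considered current when no .java or package .html source is
-       newer than the generated index.html. -->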
- <target name="javadoc-uptodate" depends="init">
- <uptodate property="javadoc.is.uptodate">
- <srcfiles dir="${src.dir}">
- <include name="**/*.java" />
- <include name="**/*.html" />
- </srcfiles>
- <mapper type="merge" to="${build.javadoc}/index.html" />
- </uptodate>
- </target>
-
- <target name="javadoc" description="Build javadoc"
- depends="init,javadoc-uptodate,compile" unless="javadoc.is.uptodate">
- <mkdir dir="${build.javadoc}" />
- <exec executable="date" outputproperty="year">
- <arg value="+%Y" />
- </exec>
- <javadoc
- packagenames="com.cloudera.sqoop.lib.*"
- destdir="${build.javadoc}"
- author="true"
- version="true"
- use="true"
- windowtitle="${Name} ${version} API"
- doctitle="${Name} ${version} API"
- bottom="Copyright &amp;copy; ${year} Cloudera, Inc.">
- <packageset dir="${src.dir}">
- <include name="com/cloudera/sqoop/lib/**" />
- </packageset>
- <classpath>
- <path refid="compile.classpath" />
- </classpath>
- </javadoc>
- </target>
-
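-  <!-- Record a failure marker file and fail the build when tests failed,
-       unless -DcontinueOnFailure is set. -->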
- <target name="checkfailure" depends="test-core" if="tests.failed">
- <touch file="${build.dir}/testsfailed"/>
- <fail unless="continueOnFailure">Unit tests failed!</fail>
- </target>
-
- <target name="clean" description="Clean build target files">
- <delete dir="${build.dir}"/>
- </target>
-
- <target name="clean-cache" description="Remove cached dependencies">
- <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop" />
-    <delete dir="${user.home}/.ivy2/cache/com.cloudera.hadoop" />
- <delete file="${ivy.jar}" />
- </target>
-
- <target name="veryclean"
- depends="clean,clean-cache"
- description="Clean build and remove cached dependencies">
- </target>
-
- <target name="findbugs" depends="check-for-findbugs,jar,compile-test"
- if="findbugs.present" description="Run FindBugs">
- <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
- classpath="${findbugs.home}/lib/findbugs-ant.jar" />
- <mkdir dir="${findbugs.out.dir}"/>
- <findbugs home="${findbugs.home}" output="xml:withMessages"
- outputFile="${findbugs.output.xml.file}" effort="max"
- excludeFilter="${findbugs.excludes}" jvmargs="-Xms512m -Xmx512m">
- <auxClasspath>
- <path refid="test.classpath"/>
- </auxClasspath>
- <sourcePath path="${src.dir}" />
- <sourcePath path="${test.dir}" />
- <class location="${build.dir}/${dest.jar}" />
- <class location="${build.test.classes}" />
- </findbugs>
- <xslt style="${findbugs.home}/src/xsl/default.xsl"
- in="${findbugs.output.xml.file}"
- out="${findbugs.output.html.file}" />
- </target>
-
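-  <!-- FindBugs support requires -Dfindbugs.home pointing at a local FindBugs
-       installation; fail with guidance if it is unset. -->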
- <target name="warn-findbugs-unset" unless="findbugs.home">
- <fail message="You need to set -Dfindbugs.home=/path/to/findbugs" />
- </target>
-
- <target name="check-for-findbugs" depends="warn-findbugs-unset">
- <available property="findbugs.present"
- file="${findbugs.home}/lib/findbugs.jar" />
- </target>
-
- <target name="cobertura"
- depends="check-for-cobertura,warn-cobertura-unset,jar,compile-test,test-prep"
- if="cobertura.present" description="Run Cobertura (code coverage)">
- <taskdef classpathref="cobertura.classpath"
- resource="tasks.properties"/>
- <mkdir dir="${cobertura.class.dir}" />
- <cobertura-instrument todir="${cobertura.class.dir}"
- datafile="${cobertura.dir}/cobertura-${cobertura.testset}.ser">
- <fileset dir="${build.classes}">
- <include name="**/*.class" />