[hive] collapse separate patches into single integ

Peter MacKinnon pmackinn at fedoraproject.org
Wed Nov 5 17:40:00 UTC 2014


commit 30f1f876bef602a805e561ad882dbfb4db91f2e5
Author: Peter MacKinnon <pmackinn at redhat.com>
Date:   Wed Nov 5 17:39:51 2014 +0000

    collapse separate patches into single integ

 hive-general-build-mods.patch |   71 ----
 hive-hcatalog-mods.patch      |   55 ---
 hive-hwi-mods.patch           |   39 --
 hive-integ.patch              |  928 +++++++++++++++++++++++++++++++++++++++++
 hive-metastore-mods.patch     |   26 --
 hive-model-enhancer-asm.patch |   28 --
 hive-ql-mods.patch            |  302 -------------
 hive-service-mods.patch       |   70 ---
 hive-shims-mods.patch         |   78 ----
 hive.spec                     |   69 ++--
 10 files changed, 961 insertions(+), 705 deletions(-)
---
diff --git a/hive-integ.patch b/hive-integ.patch
new file mode 100644
index 0000000..6048904
--- /dev/null
+++ b/hive-integ.patch
@@ -0,0 +1,928 @@
+diff --git a/build-common.xml b/build-common.xml
+index 940f4e9..cb4d9b5 100644
+--- a/build-common.xml
++++ b/build-common.xml
+@@ -191,7 +191,9 @@
+     <fileset dir="${build.dir.hive}" includes="*/*.jar"/>
+     <fileset dir="${hive.root}/lib" includes="*.jar"/>
+     <fileset dir="${build.ivy.lib.dir}/default" includes="junit*.jar" />
+-    <fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
++    <fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" 
++             excludes="**/asm-*.jar"
++             erroronmissingdir="false" />
+     <fileset dir="${build.ivy.lib.dir}/default" includes="*.jar"
+              excludes="**/hadoop-*.jar"
+              erroronmissingdir="false"/>
+diff --git a/build.properties b/build.properties
+index 008d1bb..4ae8e16 100644
+--- a/build.properties
++++ b/build.properties
+@@ -75,8 +75,8 @@ common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
+ # module names needed for build process
+ 
+ # full profile
+-iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
+-iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
++iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi
++iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi
+ iterate.hive.full.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog
+ iterate.hive.full.thrift=ql,service,metastore,serde
+ iterate.hive.full.protobuf=ql
+@@ -91,8 +91,8 @@ iterate.hive.nohcat.protobuf=ql
+ iterate.hive.nohcat.cpp=odbc
+ 
+ # core profile
+-iterate.hive.core.all=ant,shims,common,serde,metastore,ql,cli
+-iterate.hive.core.modules=shims,common,serde,metastore,ql,cli
++iterate.hive.core.all=ant,shims,common,serde,metastore,ql,service,cli
++iterate.hive.core.modules=shims,common,serde,metastore,ql,service,cli
+ iterate.hive.core.tests=ql
+ iterate.hive.core.thrift=ql
+ iterate.hive.core.protobuf=ql
+diff --git a/hbase-handler/ivy.xml b/hbase-handler/ivy.xml
+index 7be8649..57c0a60 100644
+--- a/hbase-handler/ivy.xml
++++ b/hbase-handler/ivy.xml
+@@ -36,7 +36,7 @@
+     </dependency>
+     <dependency org="com.github.stephenc.high-scale-lib" name="high-scale-lib" rev="1.1.1"
+                 transitive="false"/>
+-    <dependency org="com.yammer.metrics" name="metrics-core" rev="${metrics-core.version}">
++    <dependency org="com.codahale.metrics" name="metrics-core" rev="${metrics-core.version}">
+       <exclude org="org.slf4j" module="slf4j-api"/><!--causes a dual slf4j presence otherwise-->
+     </dependency>
+     <dependency org="org.codehaus.jackson" name="jackson-jaxrs" rev="${jackson.version}"/>
+diff --git a/hcatalog/pom.xml b/hcatalog/pom.xml
+index 499e8c9..d8271ee 100644
+--- a/hcatalog/pom.xml
++++ b/hcatalog/pom.xml
+@@ -33,7 +33,7 @@
+       <hive.version>${project.version}</hive.version>
+       <jackson.version>1.9.2</jackson.version>
+       <jersey.version>1.14</jersey.version>
+-      <jetty.webhcat.version>7.6.0.v20120127</jetty.webhcat.version>
++      <jetty.webhcat.version>8.1.14.v20131031</jetty.webhcat.version>
+       <jms.version>1.1</jms.version>
+       <pig.version>0.10.1</pig.version>
+       <slf4j.version>1.6.1</slf4j.version>
+@@ -109,13 +109,6 @@
+         </dependency>
+         <dependency>
+           <groupId>org.apache.hadoop</groupId>
+-          <artifactId>hadoop-hdfs</artifactId>
+-          <version>${hadoop23.version}</version>
+-          <classifier>tests</classifier>
+-          <scope>compile</scope>
+-        </dependency>
+-        <dependency>
+-          <groupId>org.apache.hadoop</groupId>
+           <artifactId>hadoop-mapreduce-client-core</artifactId>
+           <version>${hadoop23.version}</version>
+           <scope>compile</scope>
+@@ -129,26 +122,12 @@
+         </dependency>
+         <dependency>
+           <groupId>org.apache.hadoop</groupId>
+-          <artifactId>hadoop-yarn-server-tests</artifactId>
+-          <version>${hadoop23.version}</version>
+-          <classifier>tests</classifier>
+-          <scope>compile</scope>
+-        </dependency>
+-        <dependency>
+-          <groupId>org.apache.hadoop</groupId>
+           <artifactId>hadoop-mapreduce-client-app</artifactId>
+           <version>${hadoop23.version}</version>
+           <scope>compile</scope>
+         </dependency>
+         <dependency>
+           <groupId>org.apache.hadoop</groupId>
+-          <artifactId>hadoop-common</artifactId>
+-          <version>${hadoop23.version}</version>
+-          <classifier>tests</classifier>
+-          <scope>compile</scope>
+-        </dependency>
+-        <dependency>
+-          <groupId>org.apache.hadoop</groupId>
+           <artifactId>hadoop-mapreduce-client-hs</artifactId>
+           <version>${hadoop23.version}</version>
+           <scope>compile</scope>
+@@ -163,7 +142,6 @@
+           <groupId>org.apache.pig</groupId>
+           <artifactId>pig</artifactId>
+           <version>${pig.version}</version>
+-          <classifier>h2</classifier>
+           <scope>compile</scope>
+         </dependency>
+       </dependencies>
+@@ -176,7 +154,6 @@
+     <module>server-extensions</module>
+     <module>webhcat/java-client</module>
+     <module>webhcat/svr</module>
+-    <module>storage-handlers/hbase</module>
+   </modules>
+ 
+   <repositories>
+diff --git a/hcatalog/server-extensions/pom.xml b/hcatalog/server-extensions/pom.xml
+index f9ec4a5..6b76bfa 100644
+--- a/hcatalog/server-extensions/pom.xml
++++ b/hcatalog/server-extensions/pom.xml
+@@ -65,6 +65,12 @@
+             <version>${hcatalog.version}</version>
+             <scope>compile</scope>
+         </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-common</artifactId>
++            <version>${hadoop23.version}</version>
++            <scope>compile</scope>
++        </dependency>
+ 
+         <!-- test scope -->
+         <dependency>
+diff --git a/hcatalog/webhcat/java-client/pom.xml b/hcatalog/webhcat/java-client/pom.xml
+index 97ec5f6..2b692e5 100644
+--- a/hcatalog/webhcat/java-client/pom.xml
++++ b/hcatalog/webhcat/java-client/pom.xml
+@@ -41,5 +41,17 @@
+             <version>${hcatalog.version}</version>
+             <scope>compile</scope>
+         </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-common</artifactId>
++            <version>${hadoop23.version}</version>
++            <scope>compile</scope>
++        </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-mapreduce-client-core</artifactId>
++            <version>${hadoop23.version}</version>
++            <scope>compile</scope>
++        </dependency>
+     </dependencies>
+ </project>
+diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/svr/pom.xml
+index da6038a..a560604 100644
+--- a/hcatalog/webhcat/svr/pom.xml
++++ b/hcatalog/webhcat/svr/pom.xml
+@@ -89,8 +89,20 @@
+             <scope>compile</scope>
+         </dependency>
+         <dependency>
+-            <groupId>org.eclipse.jetty.aggregate</groupId>
+-            <artifactId>jetty-all-server</artifactId>
++            <groupId>org.eclipse.jetty</groupId>
++            <artifactId>jetty-server</artifactId>
++            <version>${jetty.webhcat.version}</version>
++            <scope>compile</scope>
++        </dependency>
++        <dependency>
++            <groupId>org.eclipse.jetty</groupId>
++            <artifactId>jetty-util</artifactId>
++            <version>${jetty.webhcat.version}</version>
++            <scope>compile</scope>
++        </dependency>
++        <dependency>
++            <groupId>org.eclipse.jetty</groupId>
++            <artifactId>jetty-rewrite</artifactId>
+             <version>${jetty.webhcat.version}</version>
+             <scope>compile</scope>
+         </dependency>
+@@ -106,5 +118,23 @@
+             <version>${slf4j.version}</version>
+             <scope>compile</scope>
+         </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-common</artifactId>
++            <version>${hadoop23.version}</version>
++            <scope>compile</scope>
++        </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-mapreduce-client-core</artifactId>
++            <version>${hadoop23.version}</version>
++            <scope>compile</scope>
++        </dependency>
++        <dependency>
++            <groupId>org.apache.hadoop</groupId>
++            <artifactId>hadoop-hdfs</artifactId>
++            <version>${hadoop23.version}</version>
++            <scope>compile</scope>
++        </dependency>
+     </dependencies>
+ </project>
+diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+index 0f37278..fb3f825 100644
+--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
++++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+@@ -25,6 +25,8 @@
+ import java.io.IOException;
+ import java.util.ArrayList;
+ import java.util.HashMap;
++import java.util.EnumSet;
++import javax.servlet.DispatcherType;
+ 
+ import org.apache.commons.logging.Log;
+ import org.apache.commons.logging.LogFactory;
+@@ -169,21 +171,21 @@ public Server runServer(int port)
+      * callbacks. So jetty would fail the request as unauthorized.
+      */ 
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/ddl/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/pig/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/hive/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/queue/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/jobs/*",
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/mapreduce/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/status/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+     root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/version/*", 
+-             FilterMapping.REQUEST);
++             EnumSet.of(DispatcherType.REQUEST));
+ 
+     // Connect Jersey
+     ServletHolder h = new ServletHolder(new ServletContainer(makeJerseyConfig()));
+diff --git a/hwi/ivy.xml b/hwi/ivy.xml
+index 81fa89b..aa5435d 100644
+--- a/hwi/ivy.xml
++++ b/hwi/ivy.xml
+@@ -28,7 +28,7 @@
+   <dependencies>
+     <dependency org="org.apache.hive" name="hive-cli" rev="${version}"
+                 conf="compile->default" />
+-    <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}" />
++    <dependency org="org.eclipse.jetty" name="jetty-util" rev="${jetty.version}" />
+ 
+     <!-- Test Dependencies -->
+     <dependency org="commons-httpclient" name="commons-httpclient" rev="${commons-httpclient.version}"
+diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
+index 545f687..d34a7ae 100644
+--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
++++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
+@@ -103,7 +103,7 @@ public void start() throws IOException {
+           webServer.join();
+           l4j.debug(" HWI Web Server is started.");
+           break;
+-        } catch (org.mortbay.util.MultiException ex) {
++        } catch (org.eclipse.jetty.util.MultiException ex) {
+           throw ex;
+         }
+       }
+diff --git a/ivy/ivysettings.xml b/ivy/ivysettings.xml
+index d230f2c..0119f96 100644
+--- a/ivy/ivysettings.xml
++++ b/ivy/ivysettings.xml
+@@ -41,49 +41,25 @@
+   <include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
+   <settings defaultResolver="${resolvers}"/>
+ 
+-  <resolvers>
+-    <ibiblio name="maven2" root="${repo.maven.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
+-    <ibiblio name="apache-snapshot" root="${snapshot.apache.org}" m2compatible="true"
+-             checkmodified="${ivy.checkmodified}" 
+-             changingPattern="${ivy.changingPattern}"/>
+-
+-    <url name="datanucleus-repo" m2compatible="true">
+-      <artifact pattern="${datanucleus.repo}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]"/>
+-    </url>
++  <typedef name="xmvn" classname="org.fedoraproject.xmvn.connector.ivy.IvyResolver"/>
+ 
+-    <url name="sourceforge" m2compatible="false" checksums="">
+-      <artifact pattern="${sourceforge-repo}/[module]/files/[module]/[branch]/[module]-[revision](-[classifier]).[ext]"/>
+-    </url>
++  <resolvers>
++    <xmvn name="XMvn"/>
+ 
+-    <filesystem name="fs" m2compatible="true" alwaysCheckExactRevision="true">
+-       <artifact pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]"/>
+-       <ivy pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).pom"/>
++    <filesystem name="build" m2compatible="false" alwaysCheckExactRevision="false">
++       <artifact pattern="${build.dir.hive}/shims/[module]-[revision](-[classifier]).[ext]"/>
++       <artifact pattern="${build.dir.hive}/common/[module]-[revision](-[classifier]).[ext]"/>
++       <artifact pattern="${build.dir.hive}/serde/[module]-[revision](-[classifier]).[ext]"/>
++       <artifact pattern="${build.dir.hive}/metastore/[module]-[revision](-[classifier]).[ext]"/>
++       <artifact pattern="${build.dir.hive}/ql/[module]-[revision](-[classifier]).[ext]"/>
++       <artifact pattern="${build.dir.hive}/service/[module]-[revision](-[classifier]).[ext]"/>
++       <artifact pattern="${build.dir.hive}/cli/[module]-[revision](-[classifier]).[ext]"/>
+     </filesystem>
+ 
+-    <chain name="default" dual="true" returnFirst="true" 
+-           checkmodified="${ivy.checkmodified}" 
+-           changingPattern="${ivy.changingPattern}">
+-      <resolver ref="local" />
+-      <resolver ref="apache-snapshot"/>
+-      <resolver ref="maven2"/>
+-      <resolver ref="datanucleus-repo"/>
+-      <resolver ref="sourceforge"/>
+-    </chain>
+-
+-    <chain name="internal" dual="true">
+-      <resolver ref="local" />
+-      <resolver ref="fs"/>
+-      <resolver ref="apache-snapshot"/>
+-      <resolver ref="maven2"/>
+-      <resolver ref="datanucleus-repo"/>
+-      <resolver ref="sourceforge"/>
+-    </chain>
+-
+-    <chain name="external">
+-      <resolver ref="maven2"/>
+-      <resolver ref="datanucleus-repo"/>
++    <chain name="default" dual="true">
++      <resolver ref="XMvn" />
++      <resolver ref="build" />
+     </chain>
+-
+   </resolvers>
+ 
+   <modules>
+diff --git a/ivy/libraries.properties b/ivy/libraries.properties
+index 92ba790..8c9789b 100644
+--- a/ivy/libraries.properties
++++ b/ivy/libraries.properties
+@@ -50,7 +50,7 @@ javaewah.version=0.3.2
+ jdo-api.version=3.0.1
+ jdom.version=1.1
+ jetty.version=6.1.26
+-jline.version=0.9.94
++jline.version=1.0
+ json.version=20090211
+ junit.version=4.10
+ libfb303.version=0.9.0
+diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+index f155686..9a511bd 100644
+--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
++++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+@@ -235,7 +235,6 @@ private void openSession() throws SQLException {
+ 
+     try {
+       TOpenSessionResp openResp = client.OpenSession(openReq);
+-
+       // validate connection
+       Utils.verifySuccess(openResp.getStatus());
+       if (!supportedProtocols.contains(openResp.getServerProtocolVersion())) {
+diff --git a/metastore/ivy.xml b/metastore/ivy.xml
+index 4bbdfe6..1cd6399 100644
+--- a/metastore/ivy.xml
++++ b/metastore/ivy.xml
+@@ -31,9 +31,6 @@
+     <dependency org="org.antlr" name="ST4" rev="${ST4.version}" transitive="false"/><!-- manually added (antlr dep), bad POM -->
+     <dependency org="org.apache.hive" name="hive-serde" rev="${version}"
+                 conf="compile->default" />
+-    <dependency org="com.jolbox" name="bonecp" rev="${BoneCP.version}">
+-        <exclude org="com.google.guava" module="guava"/>
+-    </dependency>
+ 
+     <dependency org="commons-pool" name="commons-pool" rev="${commons-pool.version}"/>
+     <dependency org="org.datanucleus" name="datanucleus-api-jdo" rev="${datanucleus-api-jdo.version}">
+diff --git a/odbc/Makefile b/odbc/Makefile
+index 2c55903..db6ba21 100644
+--- a/odbc/Makefile
++++ b/odbc/Makefile
+@@ -46,7 +46,7 @@ ARXFLAGS = -x
+ CC = gcc
+ CFLAGS = -Wall -g -fPIC
+ CXX = g++
+-CXXFLAGS = -Wall -g -fPIC
++CXXFLAGS = -Wall -g -fPIC -DHAVE_STDINT_H
+ LD = g++
+ INSTALL = /usr/bin/install -c
+ SHELL = /bin/sh
+@@ -135,7 +135,7 @@ LIB_FB303_ADD = -L$(LIB_FB303_DIR) -lfb303
+ LIB_FB303_AR  = $(LIB_FB303_DIR)/libfb303.a
+ 
+ 
+-all:: $(AR_TARGET) $(SO_TARGET) $(HIVE_CLIENT_TEST)
++all:: $(SO_TARGET) $(HIVE_CLIENT_TEST)
+ 
+ $(AR_TARGET): $(METASTORE_OBJS) $(SERVICE_OBJS) $(QL_OBJS) $(ODBC_OBJS)
+ 	if test -z '$(THRIFT_HOME)'; then echo 'THRIFT_HOME directory?'; exit 1; else exit 0; fi
+@@ -189,5 +189,5 @@ uninstall:
+ clean:
+ 	rm -rf $(ODBC_BUILD_DIR) $(OBJ_SERVICE_BUILD_DIR) $(OBJ_QL_BUILD_DIR) $(OBJ_METASTORE_BUILD_DIR)
+ 
+-test: $(AR_TARGET) $(SO_TARGET) $(HIVE_CLIENT_TEST)
++test: $(SO_TARGET) $(HIVE_CLIENT_TEST)
+ 	LD_LIBRARY_PATH=$(LIB_ODBC_BUILD_DIR):$(LIB_THRIFT_DIR):$(LIB_FB303_DIR):$(LD_LIBRARY_PATH) $(HIVE_CLIENT_TEST)
+diff --git a/odbc/src/cpp/hiveclient.cpp b/odbc/src/cpp/hiveclient.cpp
+index 450eb0b..dc5aeab 100644
+--- a/odbc/src/cpp/hiveclient.cpp
++++ b/odbc/src/cpp/hiveclient.cpp
+@@ -18,6 +18,7 @@
+ 
+ #include <assert.h>
+ #include <iostream>
++#include <arpa/inet.h>
+ #include <boost/shared_ptr.hpp>
+ #include <boost/algorithm/string.hpp>
+ 
+diff --git a/ql/build.xml b/ql/build.xml
+index 64e7b59..95344fd 100644
+--- a/ql/build.xml
++++ b/ql/build.xml
+@@ -197,7 +197,10 @@
+      deprecation="${javac.deprecation}"
+      includeantruntime="false">
+       <compilerarg line="${javac.args} ${javac.args.warnings}" />
+-      <classpath refid="classpath"/>
++      <classpath> 
++        <pathelement location="${build.ivy.lib.dir}/default/jline-${jline.version}.jar"/>
++        <path refid="classpath"/>
++      </classpath>
+     </javac>
+     <copy todir="${build.classes}" failonerror="false">
+       <fileset dir="${src.dir}/conf"/>
+@@ -208,108 +211,12 @@
+ 
+   <target name="jar" depends="make-pom,compile">
+     <echo message="Project: ${ant.project.name}"/>
+-    <unzip src="${build.ivy.lib.dir}/default/libthrift-${libthrift.version}.jar" dest="${build.dir.hive}/thrift/classes">
+-      <patternset>
+-          <exclude name="META-INF"/>
+-          <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip src="${build.ivy.lib.dir}/default/commons-lang-${commons-lang.version}.jar" dest="${build.dir.hive}/commons-lang/classes">
+-      <patternset>
+-          <exclude name="META-INF"/>
+-          <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip src="${build.ivy.lib.dir}/default/json-${json.version}.jar" dest="${build.dir.hive}/json/classes">
+-      <patternset>
+-          <exclude name="META-INF"/>
+-          <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip src="${build.ivy.lib.dir}/default/JavaEWAH-${javaewah.version}.jar" dest="${build.dir.hive}/javaewah/classes">
+-      <patternset>
+-        <exclude name="meta-inf"/>
+-        <exclude name="meta-inf/manifest.mf"/>
+-      </patternset>
+-    </unzip>
+-    <unzip src="${build.ivy.lib.dir}/default/avro-${avro.version}.jar" dest="${build.dir.hive}/avro/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip src="${build.ivy.lib.dir}/default/avro-mapred-${avro.version}.jar" dest="${build.dir.hive}/avro-mapred/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip src="${build.ivy.lib.dir}/default/javolution-${javolution.version}.jar" dest="${build.dir.hive}/javolution/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip
+-      src="${build.ivy.lib.dir}/default/protobuf-java-${protobuf.version}.jar"
+-      dest="${build.dir.hive}/protobuf-java/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip
+-      src="${build.ivy.lib.dir}/default/guava-${guava.version}.jar"
+-      dest="${build.dir.hive}/guava/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-
+-    <unzip
+-      src="${build.ivy.lib.dir}/default/snappy-${snappy.version}.jar" 
+-      dest="${build.dir.hive}/snappy/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip 
+-      src="${build.ivy.lib.dir}/default/jackson-core-asl-${jackson.version}.jar" 
+-      dest="${build.dir.hive}/jackson-core-asl/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+-    <unzip 
+-      src="${build.ivy.lib.dir}/default/jackson-mapper-asl-${jackson.version}.jar" 
+-      dest="${build.dir.hive}/jackson-mapper-asl/classes">
+-      <patternset>
+-        <exclude name="META-INF"/>
+-        <exclude name="META-INF/MANIFEST.MF"/>
+-      </patternset>
+-    </unzip>
+ 
+     <!-- jar jarfile="${build.dir}/hive_${name}.jar" basedir="${build.classes}" / -->
+     <jar jarfile="${build.dir}/hive-exec-${version}.jar">
+       <fileset dir="${build.dir.hive}/common/classes" includes="**/*.class"/>
+       <fileset dir="${build.dir.hive}/ql/classes" includes="**/*.class,**/*.properties"/>
+       <fileset dir="${build.dir.hive}/serde/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/thrift/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/commons-lang/classes" includes="**/StringUtils.class,**/WordUtils.class"/>
+-      <fileset dir="${build.dir.hive}/json/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/avro/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/avro-mapred/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/shims/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/javaewah/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/javolution/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/protobuf-java/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/snappy/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/jackson-core-asl/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/jackson-mapper-asl/classes" includes="**/*.class"/>
+-      <fileset dir="${build.dir.hive}/guava/classes" includes="**/*.class"/>
+       <manifest>
+         <!-- Not putting these in their own manifest section, since that inserts
+              a new-line, which breaks the reading of the attributes. -->
+diff --git a/ql/ivy.xml b/ql/ivy.xml
+index 08a8d6f..3ff4f40 100644
+--- a/ql/ivy.xml
++++ b/ql/ivy.xml
+@@ -44,10 +44,10 @@
+     </dependency>
+     <dependency org="com.google.protobuf" name="protobuf-java" 
+                 rev="${protobuf.version}" transitive="false"/>
+-    <dependency org="org.iq80.snappy" name="snappy" 
++    <dependency org="org.xerial.snappy" name="snappy-java" 
+                 rev="${snappy.version}" transitive="false"/>
+ 
+-    <dependency org="org.json" name="json" rev="${json.version}"/>
++    <dependency org="net.sf.json-lib" name="json-lib" rev="${json.version}"/>
+     <dependency org="commons-collections" name="commons-collections" rev="${commons-collections.version}"/>
+     <dependency org="commons-configuration" name="commons-configuration" rev="${commons-configuration.version}"
+                 transitive="false"/>
+@@ -57,13 +57,6 @@
+     <dependency org="jline" name="jline" rev="${jline.version}" transitive="false"/>
+ 
+     <!-- Hack to get jobclient tests dependency in. -->
+-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+-                rev="${hadoop-0.23.version}"
+-                conf="hadoop23.test->default">
+-      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+-      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+-      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+-    </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                 rev="${hadoop-0.23.version}"
+                 conf="hadoop23.test->default">
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+index 7e1f6ef..e5178b6 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+@@ -48,9 +48,9 @@
+ import org.apache.hadoop.hive.ql.plan.api.StageType;
+ import org.apache.hadoop.io.IOUtils;
+ import org.apache.hadoop.util.StringUtils;
+-import org.json.JSONArray;
+-import org.json.JSONException;
+-import org.json.JSONObject;
++import org.codehaus.jettison.json.JSONArray;
++import org.codehaus.jettison.json.JSONException;
++import org.codehaus.jettison.json.JSONObject;
+ 
+ /**
+  * ExplainTask implementation.
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+index 5beb48e..db0b623 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+@@ -1129,9 +1129,7 @@ Object next(Object previous) throws IOException {
+     void seek(PositionProvider[] index) throws IOException {
+       super.seek(index);
+       for(TreeReader kid: fields) {
+-        if (kid != null) {
+-          kid.seek(index);
+-        }
++        kid.seek(index);
+       }
+     }
+ 
+@@ -1177,9 +1175,7 @@ void startStripe(Map<StreamName, InStream> streams,
+     void skipRows(long items) throws IOException {
+       items = countNonNulls(items);
+       for(TreeReader field: fields) {
+-        if (field != null) {
+-          field.skipRows(items);
+-        }
++        field.skipRows(items);
+       }
+     }
+   }
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
+index e3131a3..c8324ad 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
+@@ -18,7 +18,7 @@
+ 
+ package org.apache.hadoop.hive.ql.io.orc;
+ 
+-import org.iq80.snappy.Snappy;
++import org.xerial.snappy.Snappy;
+ 
+ import java.io.IOException;
+ import java.nio.ByteBuffer;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+index 3031d1c..222f276 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+@@ -47,9 +47,9 @@
+ import org.apache.thrift.TException;
+ import org.apache.thrift.TSerializer;
+ import org.apache.thrift.protocol.TJSONProtocol;
+-import org.json.JSONArray;
+-import org.json.JSONException;
+-import org.json.JSONObject;
++import org.codehaus.jettison.json.JSONArray;
++import org.codehaus.jettison.json.JSONException;
++import org.codehaus.jettison.json.JSONObject;
+ 
+ /**
+  *
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
+index 58ea3ba..198bab5 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
+@@ -22,7 +22,7 @@
+ import java.util.ArrayList;
+ import java.util.List;
+ 
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+ 
+ import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+ import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
+index e4b412e..bb8afe7 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
+@@ -20,7 +20,7 @@
+ import java.io.IOException;
+ import java.util.ArrayList;
+ 
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+ 
+ import org.apache.commons.logging.Log;
+ import org.apache.commons.logging.LogFactory;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
+index 7838b54..b942988 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
+@@ -18,7 +18,7 @@
+ 
+ package org.apache.hadoop.hive.ql.udf.generic;
+ 
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+ 
+ import org.apache.hadoop.hive.ql.exec.Description;
+ 
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
+index 4a14a65..f0617c1 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
+@@ -21,7 +21,7 @@
+ import java.io.IOException;
+ import java.util.ArrayList;
+ 
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+ 
+ import org.apache.hadoop.hive.ql.exec.Description;
+ import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
+index d438f82..47e9447 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
+@@ -18,7 +18,7 @@
+ 
+ package org.apache.hadoop.hive.ql.udf.generic;
+ 
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+ 
+ import org.apache.hadoop.hive.ql.exec.Description;
+ 
+diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+index 857e627..1098f08 100644
+--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
++++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+@@ -59,7 +59,7 @@
+   protected int portNum;
+   protected InetSocketAddress serverAddress;
+   protected TServer server;
+-  protected org.mortbay.jetty.Server httpServer;
++  protected org.eclipse.jetty.server.Server httpServer;
+ 
+   private boolean isStarted = false;
+   protected boolean isEmbedded = false;
+diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+index e487a7f..e6e139b 100644
+--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
++++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+@@ -26,10 +26,11 @@
+ import org.apache.thrift.protocol.TBinaryProtocol;
+ import org.apache.thrift.protocol.TProtocolFactory;
+ import org.apache.thrift.server.TServlet;
+-import org.mortbay.jetty.nio.SelectChannelConnector;
+-import org.mortbay.jetty.servlet.Context;
+-import org.mortbay.jetty.servlet.ServletHolder;
+-import org.mortbay.thread.QueuedThreadPool;
++import org.eclipse.jetty.server.Connector;
++import org.eclipse.jetty.server.nio.SelectChannelConnector;
++import org.eclipse.jetty.servlet.ServletContextHandler;
++import org.eclipse.jetty.servlet.ServletHolder;
++import org.eclipse.jetty.util.thread.QueuedThreadPool;
+ 
+ 
+ public class ThriftHttpCLIService extends ThriftCLIService {
+@@ -75,11 +76,10 @@ public void run() {
+         }
+       }
+ 
+-      httpServer = new org.mortbay.jetty.Server();
+-
+       QueuedThreadPool threadPool = new QueuedThreadPool();
+       threadPool.setMinThreads(minWorkerThreads);
+       threadPool.setMaxThreads(maxWorkerThreads);
++      httpServer = new org.eclipse.jetty.server.Server();
+       httpServer.setThreadPool(threadPool);
+       SelectChannelConnector connector = new SelectChannelConnector();
+       connector.setPort(portNum);
+@@ -93,7 +93,7 @@ public void run() {
+ 
+       TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+       TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory);
+-      final Context context = new Context(httpServer, "/", Context.SESSIONS);
++      final ServletContextHandler context = new ServletContextHandler(httpServer, "/", ServletContextHandler.SESSIONS);
+       context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
+ 
+       // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc.
+@@ -144,4 +144,4 @@ private static void verifyHttpConfiguration(HiveConf hiveConf) {
+     }
+   }
+ 
+-}
+\ No newline at end of file
++}
+diff --git a/shims/ivy.xml b/shims/ivy.xml
+index c0312bc..842f336 100644
+--- a/shims/ivy.xml
++++ b/shims/ivy.xml
+@@ -39,7 +39,7 @@
+     <dependency org="commons-logging" name="commons-logging-api" rev="${commons-logging-api.version}"
+                 transitive="false"/>
+     <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${jackson.version}"/>
+-    <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${jackson.version}"/>
++    <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${jackson.version}" transitive="false"/>
+     <dependency org="log4j" name="log4j" rev="${log4j.version}" />
+     <dependency org="com.google.guava" name="guava" rev="${guava.version}" transitive="false"/>
+ 
+@@ -48,9 +48,10 @@
+                 rev="${hadoop-0.23.version}"
+                 conf="hadoop0.23.shim->default">
+       <artifact name="hadoop-common" ext="jar" />
+-      <artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests"/>
++      <artifact name="hadoop-common" ext="jar" m:classifier="tests"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core"
+                 rev="${hadoop-0.23.version}"
+@@ -58,6 +59,7 @@
+       <include type="jar"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-archives"
+                 rev="${hadoop-0.23.version}"
+@@ -65,14 +67,16 @@
+       <include type="jar"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-hdfs"
+                 rev="${hadoop-0.23.version}"
+                 conf="hadoop0.23.shim->default">
+       <artifact name="hadoop-hdfs" ext="jar" />
+-      <artifact name="hadoop-hdfs" type="tests" ext="jar" m:classifier="tests"/>
++      <artifact name="hadoop-hdfs" ext="jar" m:classifier="tests"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient"
+                 rev="${hadoop-0.23.version}"
+@@ -81,6 +85,7 @@
+       <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common"
+                 rev="${hadoop-0.23.version}"
+@@ -88,6 +93,7 @@
+       <include type="jar"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+ 
+     <!-- jobclient tests dependency -->
+@@ -97,13 +103,7 @@
+       <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+-    </dependency>
+-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+-                rev="${hadoop-0.23.version}"
+-                conf="hadoop0.23.shim->default">
+-      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+-      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+-      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                 rev="${hadoop-0.23.version}"
+@@ -111,6 +111,7 @@
+       <include type="jar"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
+                 rev="${hadoop-0.23.version}"
+@@ -118,6 +119,7 @@
+       <include type="jar"/>
+       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
++      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/><!--bad POM-->
+     </dependency>
+ 
+     <!-- Hadoop 0.20 shim dependencies. Used for building 0.20 shims. -->
+diff --git a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
+index 9328749..786a9d6 100644
+--- a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
++++ b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
+@@ -19,9 +19,11 @@
+ 
+ import java.io.IOException;
+ 
+-import org.mortbay.jetty.bio.SocketConnector;
+-import org.mortbay.jetty.handler.RequestLogHandler;
+-import org.mortbay.jetty.webapp.WebAppContext;
++import org.eclipse.jetty.server.Connector;
++import org.eclipse.jetty.server.Server;
++import org.eclipse.jetty.server.nio.SelectChannelConnector;
++import org.eclipse.jetty.server.handler.RequestLogHandler;
++import org.eclipse.jetty.webapp.WebAppContext;
+ 
+ /**
+  * Jetty23Shims.
+@@ -34,20 +36,20 @@ public Server startServer(String listen, int port) throws IOException {
+     return s;
+   }
+ 
+-  private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server {
++  private static class Server extends org.eclipse.jetty.server.Server implements JettyShims.Server {
+     public void addWar(String war, String contextPath) {
+       WebAppContext wac = new WebAppContext();
+       wac.setContextPath(contextPath);
+       wac.setWar(war);
+       RequestLogHandler rlh = new RequestLogHandler();
+       rlh.setHandler(wac);
+-      this.addHandler(rlh);
++      this.setHandler(rlh);
+     }
+ 
+     public void setupListenerHostPort(String listen, int port)
+         throws IOException {
+ 
+-      SocketConnector connector = new SocketConnector();
++      Connector connector = new SelectChannelConnector();
+       connector.setPort(port);
+       connector.setHost(listen);
+       this.addConnector(connector);
diff --git a/hive.spec b/hive.spec
index 6310bc5..2847787 100644
--- a/hive.spec
+++ b/hive.spec
@@ -1,4 +1,4 @@
-%global hadoop_version 2.2.0
+%global hadoop_version 2.4.1
 %global hadoop_generation 23
 %global pig_version 0.12.0
 %global jetty_version 8.1.14.v20131031
@@ -6,22 +6,14 @@
 
 Name:          hive
 Version:       0.12.0
-Release:       4%{?dist}
+Release:       5%{?dist}
 Summary:       The Apache Hadoop data warehouse
 
 License:       ASL 2.0
 URL:           http://hive.apache.org/
 
 Source0:       https://github.com/apache/%{name}/archive/release-%{version}.tar.gz
-Patch0:        %{name}-general-build-mods.patch
-# following patches are organized per module
-Patch1:        %{name}-shims-mods.patch
-Patch2:        %{name}-metastore-mods.patch
-Patch3:        %{name}-ql-mods.patch
-Patch4:        %{name}-service-mods.patch
-Patch5:        %{name}-hcatalog-mods.patch
-Patch6:        %{name}-model-enhancer-asm.patch
-Patch7:        %{name}-hwi-mods.patch
+Patch0:        %{name}-integ.patch
 
 BuildRequires: activemq-core
 BuildRequires: activemq-kahadb
@@ -45,6 +37,7 @@ BuildRequires: jetty8
 BuildRequires: json-lib
 BuildRequires: json_simple
 BuildRequires: libthrift-java
+BuildRequires: make
 BuildRequires: maven-clean-plugin
 BuildRequires: maven-install-plugin
 BuildRequires: maven-local >= 3.5.0-2
@@ -82,7 +75,6 @@ Requires:       glassfish-el-api
 Requires:       guava
 Requires:       hadoop-common
 Requires:       hadoop-mapreduce
-Requires:       hbase
 Requires:       jackson
 Requires:       javaewah
 Requires:       javolution
@@ -127,13 +119,6 @@ This package contains javadoc for %{name}.
 %setup -q -n %{name}-release-%{version}
 
 %patch0 -p1
-%patch1 -p1
-%patch2 -p1
-%patch3 -p1
-%patch4 -p1
-%patch5 -p1
-%patch6 -p1
-%patch7 -p1
 
 find -name "*.jar" -delete
 
@@ -143,7 +128,6 @@ sed -i 's/\r//' LICENSE NOTICE README.txt
 sed -i "/<get.*ivy_repo_url.*ivy.jar/d" build.xml
 
 # hcatalog setup
-%pom_disable_module storage-handlers/hbase hcatalog
 %pom_remove_dep org.apache.pig:pig hcatalog
 %pom_add_dep org.apache.pig:pig:%{pig_version} hcatalog
 
@@ -159,7 +143,6 @@ sed -i "/<get.*ivy_repo_url.*ivy.jar/d" build.xml
 %pom_add_dep org.apache.hadoop:hadoop-common:%{hadoop_version} hcatalog/webhcat/svr
 %pom_add_dep org.apache.hadoop:hadoop-mapreduce-client-core:%{hadoop_version} hcatalog/webhcat/svr
 %pom_add_dep org.apache.hadoop:hadoop-hdfs:%{hadoop_version} hcatalog/webhcat/svr
-%pom_remove_dep org.eclipse.jetty.aggregate:jetty-all-server hcatalog/webhcat/svr
 %pom_add_dep org.eclipse.jetty:jetty-server:%{jetty_version} hcatalog/webhcat/svr
 %pom_add_dep org.eclipse.jetty:jetty-util:%{jetty_version} hcatalog/webhcat/svr
 %pom_add_dep org.eclipse.jetty:jetty-rewrite:%{jetty_version} hcatalog/webhcat/svr
@@ -189,9 +172,18 @@ export LC_ALL=en_US.UTF-8
 ant $ARG_BLOCK package
 
 # hbase-handler
-# TODO: upstream trunk patch for Hbase version 0.96 (HIVE-4388)
-# but trunk patch is TOO sprawling (665k);
-# will enable in a future TBD upstream release
+# latest hbase incompatible with this version of hive
+
+# massage some of the generated poms to keep xmvn quiet
+# about optional deps
+%pom_remove_dep org.apache.hadoop:hadoop-tools build/shims
+%pom_remove_dep org.apache.hadoop:hadoop-test build/shims
+%pom_remove_dep org.apache.hadoop:hadoop-core build/shims
+%pom_remove_dep org.apache.hadoop:hadoop-mapreduce-client-jobclient build/shims                                                                                                                                        
+%pom_remove_dep org.apache.hive:hive-hbase-handler build/ql
+%pom_remove_dep org.apache.hive:hive-testutils build/ql
+%pom_remove_dep org.apache.hbase:hbase build/ql
+%pom_remove_dep org.apache.mina:mina-core build/ql
 
 # before building hcatalog with xmvn, we need to install 
 # its newly created hive deps
@@ -209,6 +201,7 @@ pushd .
 cd hcatalog
 %mvn_build -f -s -j -- \
         -Dmvn.hadoop.profile=hadoop%{hadoop_generation} -Dhadoop.version=%{hadoop_version} \
+        -Dhadoop23.version=%{hadoop_version} \
         -Djetty.webhcat.version=%{jetty_version} \
         -Dpig.version=%{pig_version}
 
@@ -231,29 +224,29 @@ pushd .
 cd hcatalog
 
 # add the parent pom
-install -pm 0644 pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog.pom
-%add_maven_depmap JPP.%{name}-hcatalog.pom -f "hcatalog"
+install -pm 0644 pom.xml %{buildroot}%{_mavenpomdir}/%{name}-hcatalog.pom
+%add_maven_depmap %{name}-hcatalog.pom -f "hcatalog"
 
 # inconsistent module naming means we do these individually
 install -pm 0644 core/target/hcatalog-core-%{version}.jar %{buildroot}%{_javadir}/%{name}/hcatalog-core.jar
-install -pm 0644 core/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog-core.pom
-%add_maven_depmap JPP.%{name}-hcatalog-core.pom %{name}/hcatalog-core.jar -f "hcatalog"
+install -pm 0644 core/pom.xml %{buildroot}%{_mavenpomdir}/%{name}-hcatalog-core.pom
+%add_maven_depmap %{name}-hcatalog-core.pom %{name}/hcatalog-core.jar -f "hcatalog"
  
 install -pm 0644 hcatalog-pig-adapter/target/hcatalog-pig-adapter-%{version}.jar %{buildroot}%{_javadir}/%{name}/hcatalog-pig-adapter.jar
-install -pm 0644 hcatalog-pig-adapter/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog-pig-adapter.pom
-%add_maven_depmap JPP.%{name}-hcatalog-pig-adapter.pom %{name}/hcatalog-pig-adapter.jar -f "hcatalog"
+install -pm 0644 hcatalog-pig-adapter/pom.xml %{buildroot}%{_mavenpomdir}/%{name}-hcatalog-pig-adapter.pom
+%add_maven_depmap %{name}-hcatalog-pig-adapter.pom %{name}/hcatalog-pig-adapter.jar -f "hcatalog"
 
 install -pm 0644 server-extensions/target/hcatalog-server-extensions-%{version}.jar %{buildroot}%{_javadir}/%{name}/hcatalog-server-extensions.jar
-install -pm 0644 server-extensions/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog-server-extensions.pom
-%add_maven_depmap JPP.%{name}-hcatalog-server-extensions.pom %{name}/hcatalog-server-extensions.jar -f "hcatalog"
+install -pm 0644 server-extensions/pom.xml %{buildroot}%{_mavenpomdir}/%{name}-hcatalog-server-extensions.pom
+%add_maven_depmap %{name}-hcatalog-server-extensions.pom %{name}/hcatalog-server-extensions.jar -f "hcatalog"
 
 install -pm 0644 webhcat/java-client/target/webhcat-java-client-%{version}.jar %{buildroot}%{_javadir}/%{name}/webhcat-java-client.jar
-install -pm 0644 webhcat/java-client/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-webhcat-java-client.pom
-%add_maven_depmap JPP.%{name}-webhcat-java-client.pom %{name}/webhcat-java-client.jar -f "hcatalog"
+install -pm 0644 webhcat/java-client/pom.xml %{buildroot}%{_mavenpomdir}/%{name}-webhcat-java-client.pom
+%add_maven_depmap %{name}-webhcat-java-client.pom %{name}/webhcat-java-client.jar -f "hcatalog"
 
 install -pm 0644 webhcat/svr/target/webhcat-%{version}.jar %{buildroot}%{_javadir}/%{name}/webhcat.jar
-install -pm 0644 webhcat/svr/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-webhcat.pom
-%add_maven_depmap JPP.%{name}-webhcat.pom %{name}/webhcat.jar -f "hcatalog"
+install -pm 0644 webhcat/svr/pom.xml %{buildroot}%{_mavenpomdir}/%{name}-webhcat.pom
+%add_maven_depmap %{name}-webhcat.pom %{name}/webhcat.jar -f "hcatalog"
 
 popd
 
@@ -324,6 +317,10 @@ ln -s `xmvn-resolve org.apache.thrift:libthrift` %{buildroot}%{_datadir}/hadoop/
 %doc LICENSE NOTICE
 
 %changelog
+* Tue Nov 04 2014 Peter MacKinnon <pmackinn at redhat.com> 0.12.0-5
+- collapse separate patches into single integ
+- xmvn2 updates
+
 * Sat Jun 07 2014 Fedora Release Engineering <rel-eng at lists.fedoraproject.org> - 0.12.0-4
 - Rebuilt for https://fedoraproject.org/wiki/Fedora_21_Mass_Rebuild
 

