[hive] Initial import (#1065446)
Peter MacKinnon
pmackinn at fedoraproject.org
Mon Mar 17 16:33:12 UTC 2014
commit 89b14f201940e88051b4eafad228b273a376aedc
Author: Peter MacKinnon <pmackinn at redhat.com>
Date: Mon Mar 17 12:32:08 2014 -0400
Initial import (#1065446)
.gitignore | 1 +
hive-general-build-mods.patch | 86 +++++++++++
hive-hcatalog-mods.patch | 55 +++++++
hive-metastore-mods.patch | 26 ++++
hive-model-enhancer-asm.patch | 28 ++++
hive-ql-mods.patch | 302 ++++++++++++++++++++++++++++++++++++++
hive-service-mods.patch | 70 +++++++++
hive-shims-mods.patch | 78 ++++++++++
hive.spec | 325 +++++++++++++++++++++++++++++++++++++++++
sources | 1 +
10 files changed, 972 insertions(+), 0 deletions(-)
---
diff --git a/.gitignore b/.gitignore
index e69de29..b894ec3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/release-0.12.0.tar.gz
diff --git a/hive-general-build-mods.patch b/hive-general-build-mods.patch
new file mode 100644
index 0000000..cb9936e
--- /dev/null
+++ b/hive-general-build-mods.patch
@@ -0,0 +1,86 @@
+diff --git a/build.properties b/build.properties
+index 008d1bb..010bae4 100644
+--- a/build.properties
++++ b/build.properties
+@@ -91,8 +91,8 @@ iterate.hive.nohcat.protobuf=ql
+ iterate.hive.nohcat.cpp=odbc
+
+ # core profile
+-iterate.hive.core.all=ant,shims,common,serde,metastore,ql,cli
+-iterate.hive.core.modules=shims,common,serde,metastore,ql,cli
++iterate.hive.core.all=ant,shims,common,serde,metastore,ql,service,cli
++iterate.hive.core.modules=shims,common,serde,metastore,ql,service,cli
+ iterate.hive.core.tests=ql
+ iterate.hive.core.thrift=ql
+ iterate.hive.core.protobuf=ql
+diff --git a/ivy/ivysettings.xml b/ivy/ivysettings.xml
+index d230f2c..44f3a04 100644
+--- a/ivy/ivysettings.xml
++++ b/ivy/ivysettings.xml
+@@ -41,49 +41,25 @@
+ <include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
+ <settings defaultResolver="${resolvers}"/>
+
+- <resolvers>
+- <ibiblio name="maven2" root="${repo.maven.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
+- <ibiblio name="apache-snapshot" root="${snapshot.apache.org}" m2compatible="true"
+- checkmodified="${ivy.checkmodified}"
+- changingPattern="${ivy.changingPattern}"/>
+-
+- <url name="datanucleus-repo" m2compatible="true">
+- <artifact pattern="${datanucleus.repo}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]"/>
+- </url>
++ <typedef name="xmvn" classname="org.fedoraproject.maven.connector.ivy.IvyResolver"/>
+
+- <url name="sourceforge" m2compatible="false" checksums="">
+- <artifact pattern="${sourceforge-repo}/[module]/files/[module]/[branch]/[module]-[revision](-[classifier]).[ext]"/>
+- </url>
++ <resolvers>
++ <xmvn name="XMvn"/>
+
+- <filesystem name="fs" m2compatible="true" alwaysCheckExactRevision="true">
+- <artifact pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]"/>
+- <ivy pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).pom"/>
++ <filesystem name="build" m2compatible="false" alwaysCheckExactRevision="false">
++ <artifact pattern="${build.dir.hive}/shims/[module]-[revision](-[classifier]).[ext]"/>
++ <artifact pattern="${build.dir.hive}/common/[module]-[revision](-[classifier]).[ext]"/>
++ <artifact pattern="${build.dir.hive}/serde/[module]-[revision](-[classifier]).[ext]"/>
++ <artifact pattern="${build.dir.hive}/metastore/[module]-[revision](-[classifier]).[ext]"/>
++ <artifact pattern="${build.dir.hive}/ql/[module]-[revision](-[classifier]).[ext]"/>
++ <artifact pattern="${build.dir.hive}/service/[module]-[revision](-[classifier]).[ext]"/>
++ <artifact pattern="${build.dir.hive}/cli/[module]-[revision](-[classifier]).[ext]"/>
+ </filesystem>
+
+- <chain name="default" dual="true" returnFirst="true"
+- checkmodified="${ivy.checkmodified}"
+- changingPattern="${ivy.changingPattern}">
+- <resolver ref="local" />
+- <resolver ref="apache-snapshot"/>
+- <resolver ref="maven2"/>
+- <resolver ref="datanucleus-repo"/>
+- <resolver ref="sourceforge"/>
+- </chain>
+-
+- <chain name="internal" dual="true">
+- <resolver ref="local" />
+- <resolver ref="fs"/>
+- <resolver ref="apache-snapshot"/>
+- <resolver ref="maven2"/>
+- <resolver ref="datanucleus-repo"/>
+- <resolver ref="sourceforge"/>
+- </chain>
+-
+- <chain name="external">
+- <resolver ref="maven2"/>
+- <resolver ref="datanucleus-repo"/>
++ <chain name="default" dual="true">
++ <resolver ref="XMvn" />
++ <resolver ref="build" />
+ </chain>
+-
+ </resolvers>
+
+ <modules>
+--
+1.8.5.2
+
diff --git a/hive-hcatalog-mods.patch b/hive-hcatalog-mods.patch
new file mode 100644
index 0000000..8c37139
--- /dev/null
+++ b/hive-hcatalog-mods.patch
@@ -0,0 +1,55 @@
+From 0ce8b1eca852563e634016656149662f60a33bad Mon Sep 17 00:00:00 2001
+From: Peter MacKinnon <pmackinn at redhat.com>
+Date: Wed, 8 Jan 2014 12:11:57 -0500
+Subject: [PATCH 07/10] hcatalog mods
+
+---
+ .../java/org/apache/hive/hcatalog/templeton/Main.java | 18 ++++++++++--------
+ 1 file changed, 10 insertions(+), 8 deletions(-)
+
+diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+index 0f37278..fb3f825 100644
+--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
++++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+@@ -25,6 +25,8 @@
+ import java.io.IOException;
+ import java.util.ArrayList;
+ import java.util.HashMap;
++import java.util.EnumSet;
++import javax.servlet.DispatcherType;
+
+ import org.apache.commons.logging.Log;
+ import org.apache.commons.logging.LogFactory;
+@@ -169,21 +171,21 @@ public Server runServer(int port)
+ * callbacks. So jetty would fail the request as unauthorized.
+ */
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/ddl/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/pig/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/hive/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/queue/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/jobs/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/mapreduce/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/status/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+ root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/version/*",
+- FilterMapping.REQUEST);
++ EnumSet.of(DispatcherType.REQUEST));
+
+ // Connect Jersey
+ ServletHolder h = new ServletHolder(new ServletContainer(makeJerseyConfig()));
+--
+1.8.5.2
+
diff --git a/hive-metastore-mods.patch b/hive-metastore-mods.patch
new file mode 100644
index 0000000..f645d71
--- /dev/null
+++ b/hive-metastore-mods.patch
@@ -0,0 +1,26 @@
+From a473b36b26ec609a13ae9729e66c090664e69d1d Mon Sep 17 00:00:00 2001
+From: Peter MacKinnon <pmackinn at redhat.com>
+Date: Wed, 8 Jan 2014 11:36:53 -0500
+Subject: [PATCH 03/10] metastore mods
+
+---
+ metastore/ivy.xml | 3 ---
+ 1 file changed, 3 deletions(-)
+
+diff --git a/metastore/ivy.xml b/metastore/ivy.xml
+index 4bbdfe6..1cd6399 100644
+--- a/metastore/ivy.xml
++++ b/metastore/ivy.xml
+@@ -31,9 +31,6 @@
+ <dependency org="org.antlr" name="ST4" rev="${ST4.version}" transitive="false"/><!-- manually added (antlr dep), bad POM -->
+ <dependency org="org.apache.hive" name="hive-serde" rev="${version}"
+ conf="compile->default" />
+- <dependency org="com.jolbox" name="bonecp" rev="${BoneCP.version}">
+- <exclude org="com.google.guava" module="guava"/>
+- </dependency>
+
+ <dependency org="commons-pool" name="commons-pool" rev="${commons-pool.version}"/>
+ <dependency org="org.datanucleus" name="datanucleus-api-jdo" rev="${datanucleus-api-jdo.version}">
+--
+1.8.5.2
+
diff --git a/hive-model-enhancer-asm.patch b/hive-model-enhancer-asm.patch
new file mode 100644
index 0000000..2fea1d9
--- /dev/null
+++ b/hive-model-enhancer-asm.patch
@@ -0,0 +1,28 @@
+From 35410260d0e5617954c11bd9f99660eee6668907 Mon Sep 17 00:00:00 2001
+From: Peter MacKinnon <pmackinn at redhat.com>
+Date: Tue, 18 Feb 2014 19:10:06 +0000
+Subject: [PATCH] ensure model enhancer sees correct version of asm in
+ classpath
+
+---
+ build-common.xml | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/build-common.xml b/build-common.xml
+index 940f4e9..cb4d9b5 100644
+--- a/build-common.xml
++++ b/build-common.xml
+@@ -191,7 +191,9 @@
+ <fileset dir="${build.dir.hive}" includes="*/*.jar"/>
+ <fileset dir="${hive.root}/lib" includes="*.jar"/>
+ <fileset dir="${build.ivy.lib.dir}/default" includes="junit*.jar" />
+- <fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
++ <fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar"
++ excludes="**/asm-*.jar"
++ erroronmissingdir="false" />
+ <fileset dir="${build.ivy.lib.dir}/default" includes="*.jar"
+ excludes="**/hadoop-*.jar"
+ erroronmissingdir="false"/>
+--
+1.8.5.3
+
diff --git a/hive-ql-mods.patch b/hive-ql-mods.patch
new file mode 100644
index 0000000..c7bd4ed
--- /dev/null
+++ b/hive-ql-mods.patch
@@ -0,0 +1,302 @@
+From 29b3cd2a98dba63bc6a26202ff1b084dac5aa5d3 Mon Sep 17 00:00:00 2001
+From: Peter MacKinnon <pmackinn at redhat.com>
+Date: Wed, 8 Jan 2014 12:08:10 -0500
+Subject: [PATCH 04/10] ql mods
+
+---
+ ql/build.xml | 101 +--------------------
+ ql/ivy.xml | 4 +-
+ .../apache/hadoop/hive/ql/exec/ExplainTask.java | 6 +-
+ .../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 8 +-
+ .../apache/hadoop/hive/ql/io/orc/SnappyCodec.java | 2 +-
+ .../org/apache/hadoop/hive/ql/parse/EximUtil.java | 6 +-
+ .../generic/AbstractGenericUDFEWAHBitmapBop.java | 2 +-
+ .../hive/ql/udf/generic/GenericUDAFEWAHBitmap.java | 2 +-
+ .../ql/udf/generic/GenericUDFEWAHBitmapAnd.java | 2 +-
+ .../ql/udf/generic/GenericUDFEWAHBitmapEmpty.java | 2 +-
+ .../ql/udf/generic/GenericUDFEWAHBitmapOr.java | 2 +-
+ 11 files changed, 20 insertions(+), 117 deletions(-)
+
+diff --git a/ql/build.xml b/ql/build.xml
+index 64e7b59..95344fd 100644
+--- a/ql/build.xml
++++ b/ql/build.xml
+@@ -197,7 +197,10 @@
+ deprecation="${javac.deprecation}"
+ includeantruntime="false">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+- <classpath refid="classpath"/>
++ <classpath>
++ <pathelement location="${build.ivy.lib.dir}/default/jline-${jline.version}.jar"/>
++ <path refid="classpath"/>
++ </classpath>
+ </javac>
+ <copy todir="${build.classes}" failonerror="false">
+ <fileset dir="${src.dir}/conf"/>
+@@ -208,108 +211,12 @@
+
+ <target name="jar" depends="make-pom,compile">
+ <echo message="Project: ${ant.project.name}"/>
+- <unzip src="${build.ivy.lib.dir}/default/libthrift-${libthrift.version}.jar" dest="${build.dir.hive}/thrift/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip src="${build.ivy.lib.dir}/default/commons-lang-${commons-lang.version}.jar" dest="${build.dir.hive}/commons-lang/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip src="${build.ivy.lib.dir}/default/json-${json.version}.jar" dest="${build.dir.hive}/json/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip src="${build.ivy.lib.dir}/default/JavaEWAH-${javaewah.version}.jar" dest="${build.dir.hive}/javaewah/classes">
+- <patternset>
+- <exclude name="meta-inf"/>
+- <exclude name="meta-inf/manifest.mf"/>
+- </patternset>
+- </unzip>
+- <unzip src="${build.ivy.lib.dir}/default/avro-${avro.version}.jar" dest="${build.dir.hive}/avro/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip src="${build.ivy.lib.dir}/default/avro-mapred-${avro.version}.jar" dest="${build.dir.hive}/avro-mapred/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip src="${build.ivy.lib.dir}/default/javolution-${javolution.version}.jar" dest="${build.dir.hive}/javolution/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip
+- src="${build.ivy.lib.dir}/default/protobuf-java-${protobuf.version}.jar"
+- dest="${build.dir.hive}/protobuf-java/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip
+- src="${build.ivy.lib.dir}/default/guava-${guava.version}.jar"
+- dest="${build.dir.hive}/guava/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+-
+- <unzip
+- src="${build.ivy.lib.dir}/default/snappy-${snappy.version}.jar"
+- dest="${build.dir.hive}/snappy/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip
+- src="${build.ivy.lib.dir}/default/jackson-core-asl-${jackson.version}.jar"
+- dest="${build.dir.hive}/jackson-core-asl/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+- <unzip
+- src="${build.ivy.lib.dir}/default/jackson-mapper-asl-${jackson.version}.jar"
+- dest="${build.dir.hive}/jackson-mapper-asl/classes">
+- <patternset>
+- <exclude name="META-INF"/>
+- <exclude name="META-INF/MANIFEST.MF"/>
+- </patternset>
+- </unzip>
+
+ <!-- jar jarfile="${build.dir}/hive_${name}.jar" basedir="${build.classes}" / -->
+ <jar jarfile="${build.dir}/hive-exec-${version}.jar">
+ <fileset dir="${build.dir.hive}/common/classes" includes="**/*.class"/>
+ <fileset dir="${build.dir.hive}/ql/classes" includes="**/*.class,**/*.properties"/>
+ <fileset dir="${build.dir.hive}/serde/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/thrift/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/commons-lang/classes" includes="**/StringUtils.class,**/WordUtils.class"/>
+- <fileset dir="${build.dir.hive}/json/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/avro/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/avro-mapred/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/shims/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/javaewah/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/javolution/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/protobuf-java/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/snappy/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/jackson-core-asl/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/jackson-mapper-asl/classes" includes="**/*.class"/>
+- <fileset dir="${build.dir.hive}/guava/classes" includes="**/*.class"/>
+ <manifest>
+ <!-- Not putting these in their own manifest section, since that inserts
+ a new-line, which breaks the reading of the attributes. -->
+diff --git a/ql/ivy.xml b/ql/ivy.xml
+index 08a8d6f..6fe43b4 100644
+--- a/ql/ivy.xml
++++ b/ql/ivy.xml
+@@ -44,10 +44,10 @@
+ </dependency>
+ <dependency org="com.google.protobuf" name="protobuf-java"
+ rev="${protobuf.version}" transitive="false"/>
+- <dependency org="org.iq80.snappy" name="snappy"
++ <dependency org="org.xerial.snappy" name="snappy-java"
+ rev="${snappy.version}" transitive="false"/>
+
+- <dependency org="org.json" name="json" rev="${json.version}"/>
++ <dependency org="net.sf.json-lib" name="json-lib" rev="${json.version}"/>
+ <dependency org="commons-collections" name="commons-collections" rev="${commons-collections.version}"/>
+ <dependency org="commons-configuration" name="commons-configuration" rev="${commons-configuration.version}"
+ transitive="false"/>
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+index 7e1f6ef..e5178b6 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+@@ -48,9 +48,9 @@
+ import org.apache.hadoop.hive.ql.plan.api.StageType;
+ import org.apache.hadoop.io.IOUtils;
+ import org.apache.hadoop.util.StringUtils;
+-import org.json.JSONArray;
+-import org.json.JSONException;
+-import org.json.JSONObject;
++import org.codehaus.jettison.json.JSONArray;
++import org.codehaus.jettison.json.JSONException;
++import org.codehaus.jettison.json.JSONObject;
+
+ /**
+ * ExplainTask implementation.
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+index 5beb48e..db0b623 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+@@ -1129,9 +1129,7 @@ Object next(Object previous) throws IOException {
+ void seek(PositionProvider[] index) throws IOException {
+ super.seek(index);
+ for(TreeReader kid: fields) {
+- if (kid != null) {
+- kid.seek(index);
+- }
++ kid.seek(index);
+ }
+ }
+
+@@ -1177,9 +1175,7 @@ void startStripe(Map<StreamName, InStream> streams,
+ void skipRows(long items) throws IOException {
+ items = countNonNulls(items);
+ for(TreeReader field: fields) {
+- if (field != null) {
+- field.skipRows(items);
+- }
++ field.skipRows(items);
+ }
+ }
+ }
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
+index e3131a3..c8324ad 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
+@@ -18,7 +18,7 @@
+
+ package org.apache.hadoop.hive.ql.io.orc;
+
+-import org.iq80.snappy.Snappy;
++import org.xerial.snappy.Snappy;
+
+ import java.io.IOException;
+ import java.nio.ByteBuffer;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+index 3031d1c..222f276 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+@@ -47,9 +47,9 @@
+ import org.apache.thrift.TException;
+ import org.apache.thrift.TSerializer;
+ import org.apache.thrift.protocol.TJSONProtocol;
+-import org.json.JSONArray;
+-import org.json.JSONException;
+-import org.json.JSONObject;
++import org.codehaus.jettison.json.JSONArray;
++import org.codehaus.jettison.json.JSONException;
++import org.codehaus.jettison.json.JSONObject;
+
+ /**
+ *
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
+index 58ea3ba..198bab5 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
+@@ -22,7 +22,7 @@
+ import java.util.ArrayList;
+ import java.util.List;
+
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+
+ import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+ import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
+index e4b412e..bb8afe7 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
+@@ -20,7 +20,7 @@
+ import java.io.IOException;
+ import java.util.ArrayList;
+
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+
+ import org.apache.commons.logging.Log;
+ import org.apache.commons.logging.LogFactory;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
+index 7838b54..b942988 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
+@@ -18,7 +18,7 @@
+
+ package org.apache.hadoop.hive.ql.udf.generic;
+
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+
+ import org.apache.hadoop.hive.ql.exec.Description;
+
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
+index 4a14a65..f0617c1 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
+@@ -21,7 +21,7 @@
+ import java.io.IOException;
+ import java.util.ArrayList;
+
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+
+ import org.apache.hadoop.hive.ql.exec.Description;
+ import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
+index d438f82..47e9447 100644
+--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
+@@ -18,7 +18,7 @@
+
+ package org.apache.hadoop.hive.ql.udf.generic;
+
+-import javaewah.EWAHCompressedBitmap;
++import com.googlecode.javaewah.EWAHCompressedBitmap;
+
+ import org.apache.hadoop.hive.ql.exec.Description;
+
+--
+1.8.5.2
+
diff --git a/hive-service-mods.patch b/hive-service-mods.patch
new file mode 100644
index 0000000..fe27c59
--- /dev/null
+++ b/hive-service-mods.patch
@@ -0,0 +1,70 @@
+From d689c943b09f1b88d9294a43691b3964093fe021 Mon Sep 17 00:00:00 2001
+From: Peter MacKinnon <pmackinn at redhat.com>
+Date: Wed, 8 Jan 2014 12:10:20 -0500
+Subject: [PATCH 05/10] service mods
+
+---
+ .../apache/hive/service/cli/thrift/ThriftCLIService.java | 2 +-
+ .../hive/service/cli/thrift/ThriftHttpCLIService.java | 16 +++++++---------
+ 2 files changed, 8 insertions(+), 10 deletions(-)
+
+diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+index 857e627..1098f08 100644
+--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
++++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+@@ -59,7 +59,7 @@
+ protected int portNum;
+ protected InetSocketAddress serverAddress;
+ protected TServer server;
+- protected org.mortbay.jetty.Server httpServer;
++ protected org.eclipse.jetty.server.Server httpServer;
+
+ private boolean isStarted = false;
+ protected boolean isEmbedded = false;
+diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+index e487a7f..b73df7d 100644
+--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
++++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+@@ -26,10 +26,10 @@
+ import org.apache.thrift.protocol.TBinaryProtocol;
+ import org.apache.thrift.protocol.TProtocolFactory;
+ import org.apache.thrift.server.TServlet;
+-import org.mortbay.jetty.nio.SelectChannelConnector;
+-import org.mortbay.jetty.servlet.Context;
+-import org.mortbay.jetty.servlet.ServletHolder;
+-import org.mortbay.thread.QueuedThreadPool;
++import org.eclipse.jetty.server.ServerConnector;
++import org.eclipse.jetty.servlet.ServletContextHandler;
++import org.eclipse.jetty.servlet.ServletHolder;
++import org.eclipse.jetty.util.thread.QueuedThreadPool;
+
+
+ public class ThriftHttpCLIService extends ThriftCLIService {
+@@ -75,13 +75,11 @@ public void run() {
+ }
+ }
+
+- httpServer = new org.mortbay.jetty.Server();
+-
+ QueuedThreadPool threadPool = new QueuedThreadPool();
+ threadPool.setMinThreads(minWorkerThreads);
+ threadPool.setMaxThreads(maxWorkerThreads);
+- httpServer.setThreadPool(threadPool);
+- SelectChannelConnector connector = new SelectChannelConnector();
++ httpServer = new org.eclipse.jetty.server.Server(threadPool);
++ ServerConnector connector = new ServerConnector(httpServer);
+ connector.setPort(portNum);
+
+ // Linux:yes, Windows:no
+@@ -93,7 +91,7 @@ public void run() {
+
+ TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+ TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory);
+- final Context context = new Context(httpServer, "/", Context.SESSIONS);
++ final ServletContextHandler context = new ServletContextHandler(httpServer, "/", ServletContextHandler.SESSIONS);
+ context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
+
+ // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc.
+--
+1.8.5.2
+
diff --git a/hive-shims-mods.patch b/hive-shims-mods.patch
new file mode 100644
index 0000000..fa65824
--- /dev/null
+++ b/hive-shims-mods.patch
@@ -0,0 +1,78 @@
+From 43dfa35c041afb7071be97eb1ecdac034d9513d5 Mon Sep 17 00:00:00 2001
+From: Peter MacKinnon <pmackinn at redhat.com>
+Date: Wed, 8 Jan 2014 11:36:28 -0500
+Subject: [PATCH 02/10] shims mods
+
+---
+ shims/ivy.xml | 6 ++++++
+ .../0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java | 12 ++++++------
+ 2 files changed, 12 insertions(+), 6 deletions(-)
+
+diff --git a/shims/ivy.xml b/shims/ivy.xml
+index c0312bc..39ca73b 100644
+--- a/shims/ivy.xml
++++ b/shims/ivy.xml
+@@ -74,6 +74,9 @@
+ <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+ <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+ </dependency>
++ <dependency org="org.apache.hadoop" name="hadoop-hdfs-tests"
++ rev="${hadoop-0.23.version}"
++ conf="hadoop0.23.shim->default"/>
+ <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient"
+ rev="${hadoop-0.23.version}"
+ conf="hadoop0.23.shim->default">
+@@ -82,6 +85,9 @@
+ <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+ <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+ </dependency>
++ <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient-tests"
++ rev="${hadoop-0.23.version}"
++ conf="hadoop0.23.shim->default"/>
+ <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common"
+ rev="${hadoop-0.23.version}"
+ conf="hadoop0.23.shim->default">
+diff --git a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
+index 9328749..c1b3529 100644
+--- a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
++++ b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
+@@ -19,9 +19,9 @@
+
+ import java.io.IOException;
+
+-import org.mortbay.jetty.bio.SocketConnector;
+-import org.mortbay.jetty.handler.RequestLogHandler;
+-import org.mortbay.jetty.webapp.WebAppContext;
++import org.eclipse.jetty.server.ServerConnector;
++import org.eclipse.jetty.server.handler.RequestLogHandler;
++import org.eclipse.jetty.webapp.WebAppContext;
+
+ /**
+ * Jetty23Shims.
+@@ -34,20 +34,20 @@ public Server startServer(String listen, int port) throws IOException {
+ return s;
+ }
+
+- private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server {
++ private static class Server extends org.eclipse.jetty.server.Server implements JettyShims.Server {
+ public void addWar(String war, String contextPath) {
+ WebAppContext wac = new WebAppContext();
+ wac.setContextPath(contextPath);
+ wac.setWar(war);
+ RequestLogHandler rlh = new RequestLogHandler();
+ rlh.setHandler(wac);
+- this.addHandler(rlh);
++ this.setHandler(rlh);
+ }
+
+ public void setupListenerHostPort(String listen, int port)
+ throws IOException {
+
+- SocketConnector connector = new SocketConnector();
++ ServerConnector connector = new ServerConnector(this);
+ connector.setPort(port);
+ connector.setHost(listen);
+ this.addConnector(connector);
+--
+1.8.5.2
+
diff --git a/hive.spec b/hive.spec
new file mode 100644
index 0000000..fa0414f
--- /dev/null
+++ b/hive.spec
@@ -0,0 +1,325 @@
+%global hadoop_version 2.2.0
+%global hadoop_generation 23
+%global pig_version 0.12.0
+%global jetty_version 8.1.14.v20131031
+%global jline_version 1
+
+Name: hive
+Version: 0.12.0
+Release: 1%{?dist}
+Summary: The Apache Hadoop data warehouse
+
+License: ASL 2.0
+URL: http://hive.apache.org/
+
+Source0: https://github.com/apache/%{name}/archive/release-%{version}.tar.gz
+Patch0: %{name}-general-build-mods.patch
+# following patches are organized per module
+Patch1: %{name}-shims-mods.patch
+Patch2: %{name}-metastore-mods.patch
+Patch3: %{name}-ql-mods.patch
+Patch4: %{name}-service-mods.patch
+Patch5: %{name}-hcatalog-mods.patch
+Patch6: %{name}-model-enhancer-asm.patch
+
+BuildRequires: activemq-core
+BuildRequires: activemq-kahadb
+BuildRequires: ant-contrib
+BuildRequires: apache-commons-exec
+BuildRequires: apache-ivy >= 2.3.0
+BuildRequires: avro-mapred
+BuildRequires: datanucleus-api-jdo
+BuildRequires: datanucleus-rdbms
+BuildRequires: fb303-devel
+BuildRequires: fb303-java
+BuildRequires: glassfish-servlet-api
+BuildRequires: hadoop-tests
+BuildRequires: ivy-local >= 3.5.0-2
+BuildRequires: jackson
+BuildRequires: java-devel
+BuildRequires: javaewah
+BuildRequires: javolution
+BuildRequires: jdo-api
+BuildRequires: jetty8
+BuildRequires: json-lib
+BuildRequires: json_simple
+BuildRequires: libthrift-java
+BuildRequires: maven-clean-plugin
+BuildRequires: maven-install-plugin
+BuildRequires: maven-local >= 3.5.0-2
+BuildRequires: pig
+BuildRequires: rsync
+BuildRequires: jul-to-slf4j
+BuildRequires: snappy-java
+BuildRequires: thrift-devel
+BuildRequires: xmvn >= 1.5.0
+BuildRequires: zookeeper-java
+
+BuildArch: noarch
+
+# problems with auto-requires being generated
+# Bug 1075626
+Requires: java >= 1:1.7.0
+Requires: antlr3-tool
+Requires: apache-commons-cli
+Requires: apache-commons-codec
+Requires: apache-commons-collections
+Requires: apache-commons-compress
+Requires: apache-commons-configuration
+Requires: apache-commons-lang
+Requires: apache-commons-logging
+Requires: apache-commons-pool
+Requires: avro-ipc
+Requires: avro-mapred
+Requires: datanucleus-api-jdo
+Requires: datanucleus-rdbms
+Requires: derby
+Requires: fb303-java
+Requires: glassfish-el-api
+Requires: guava
+Requires: hadoop-common
+Requires: hadoop-mapreduce
+Requires: hbase
+Requires: jackson
+Requires: javaewah
+Requires: javolution
+Requires: jboss-transaction-1.2-api
+Requires: jdo-api
+Requires: jetty8
+Requires: jline1
+Requires: json-lib
+Requires: libthrift-java
+Requires: log4j
+Requires: mockito
+Requires: netty3
+Requires: protobuf-java
+Requires: slf4j
+Requires: snappy-java
+Requires: zookeeper-java
+
+%description
+The Apache Hive data warehouse software facilitates querying and
+managing large datasets residing in distributed storage. Apache Hive
+provides a mechanism to project structure onto this data and query
+the data using a SQL-like language called HiveQL.
+
+%package hcatalog
+Summary: hcatalog module for Hive
+
+Requires: %{name}
+
+%description hcatalog
+This package contains module for the hcatalog sub-project in %{name}.
+
+%package javadoc
+Summary: Javadoc for %{name}
+
+%description javadoc
+This package contains javadoc for %{name}.
+
+%prep
+
+%mvn_package :hcatalog hcatalog
+
+%setup -q -n %{name}-release-%{version}
+
+%patch0 -p1
+%patch1 -p1
+%patch2 -p1
+%patch3 -p1
+%patch4 -p1
+%patch5 -p1
+%patch6 -p1
+
+find -name "*.jar" -delete
+
+sed -i 's/\r//' LICENSE NOTICE README.txt
+
+# make sure the build doesn't download ivy
+sed -i "/<get.*ivy_repo_url.*ivy.jar/d" build.xml
+
+# hcatalog setup
+%pom_disable_module storage-handlers/hbase hcatalog
+%pom_remove_dep org.apache.pig:pig hcatalog
+%pom_add_dep org.apache.pig:pig:%{pig_version} hcatalog
+
+%pom_add_dep org.apache.hadoop:hadoop-common:%{hadoop_version} hcatalog/core
+%pom_add_dep org.apache.hadoop:hadoop-mapreduce-client-core:%{hadoop_version} hcatalog/core
+%pom_add_dep org.apache.hadoop:hadoop-archives:%{hadoop_version} hcatalog/core
+
+%pom_add_dep org.apache.hadoop:hadoop-common:%{hadoop_version} hcatalog/server-extensions
+
+%pom_add_dep org.apache.hadoop:hadoop-common:%{hadoop_version} hcatalog/webhcat/java-client
+%pom_add_dep org.apache.hadoop:hadoop-mapreduce-client-core:%{hadoop_version} hcatalog/webhcat/java-client
+
+%pom_add_dep org.apache.hadoop:hadoop-common:%{hadoop_version} hcatalog/webhcat/svr
+%pom_add_dep org.apache.hadoop:hadoop-mapreduce-client-core:%{hadoop_version} hcatalog/webhcat/svr
+%pom_add_dep org.apache.hadoop:hadoop-hdfs:%{hadoop_version} hcatalog/webhcat/svr
+%pom_remove_dep org.eclipse.jetty.aggregate:jetty-all-server hcatalog/webhcat/svr
+%pom_add_dep org.eclipse.jetty:jetty-server:%{jetty_version} hcatalog/webhcat/svr
+%pom_add_dep org.eclipse.jetty:jetty-util:%{jetty_version} hcatalog/webhcat/svr
+%pom_add_dep org.eclipse.jetty:jetty-rewrite:%{jetty_version} hcatalog/webhcat/svr
+
+# missing shebang
+sed -i -e '1d;2i#!/usr/bin/env bash' bin/hive-config.sh
+
+%build
+
+ARG_BLOCK="
+-Dhadoop.version=%{hadoop_version} -Dhadoop-0.%{hadoop_generation}.version=%{hadoop_version} \
+-Dhadoop.mr.rev=%{hadoop_generation} -Dmvn.hadoop.profile=hadoop%{hadoop_generation} \
+-Dshims.include=0.%{hadoop_generation} \
+-Dbuild.profile=core -Dthrift.home=/usr \
+-Djline.version=%{jline_version} \
+-Djetty.version=%{jetty_version} \
+"
+
+# for javadoc encoding
+export LC_ALL=en_US.UTF-8
+
+# core modules
+ant $ARG_BLOCK package
+
+# hbase-handler
+# TODO: upstream trunk patch for HBase version 0.96 (HIVE-4388)
+# but trunk patch is TOO sprawling (665k);
+# will enable in a future TBD upstream release
+
+# before building hcatalog with xmvn, we need to install
+# its newly created hive deps
+for module in cli common metastore serde service shims; do
+ %mvn_artifact build/$module/pom.xml build/$module/%{name}-$module-%{version}.jar
+ xmvn install:install-file -Dfile=build/$module/%{name}-$module-%{version}.jar -DpomFile=build/$module/pom.xml
+done
+# ql -> hive-exec
+%mvn_artifact build/ql/pom.xml build/ql/%{name}-exec-%{version}.jar
+xmvn install:install-file -Dfile=build/ql/%{name}-exec-%{version}.jar -DpomFile=build/ql/pom.xml
+
+# hcatalog
+mkdir -p build/hcatalog/classes
+pushd .
+cd hcatalog
+%mvn_build -f -s -j -- \
+ -Dmvn.hadoop.profile=hadoop%{hadoop_generation} -Dhadoop.version=%{hadoop_version} \
+ -Djetty.webhcat.version=%{jetty_version} \
+ -Dpig.version=%{pig_version}
+
+# javadoc setup
+for module in core hcatalog-pig-adapter server-extensions webhcat/java-client webhcat/svr; do
+ cp -r ${module}/target/classes/* ../build/hcatalog/classes
+ cp -r ${module}/src/main/java/* src/java/
+done
+
+popd
+
+# combo javadoc this time
+ant $ARG_BLOCK javadoc
+
+%install
+%mvn_install -J build/dist/docs
+
+# hcatalog install
+pushd .
+cd hcatalog
+
+# add the parent pom
+install -pm 0644 pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog.pom
+%add_maven_depmap JPP.%{name}-hcatalog.pom -f "hcatalog"
+
+# inconsistent module naming means we do these individually
+install -pm 0644 core/target/hcatalog-core-%{version}.jar %{buildroot}%{_javadir}/%{name}/hcatalog-core.jar
+install -pm 0644 core/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog-core.pom
+%add_maven_depmap JPP.%{name}-hcatalog-core.pom %{name}/hcatalog-core.jar -f "hcatalog"
+
+install -pm 0644 hcatalog-pig-adapter/target/hcatalog-pig-adapter-%{version}.jar %{buildroot}%{_javadir}/%{name}/hcatalog-pig-adapter.jar
+install -pm 0644 hcatalog-pig-adapter/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog-pig-adapter.pom
+%add_maven_depmap JPP.%{name}-hcatalog-pig-adapter.pom %{name}/hcatalog-pig-adapter.jar -f "hcatalog"
+
+install -pm 0644 server-extensions/target/hcatalog-server-extensions-%{version}.jar %{buildroot}%{_javadir}/%{name}/hcatalog-server-extensions.jar
+install -pm 0644 server-extensions/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-hcatalog-server-extensions.pom
+%add_maven_depmap JPP.%{name}-hcatalog-server-extensions.pom %{name}/hcatalog-server-extensions.jar -f "hcatalog"
+
+install -pm 0644 webhcat/java-client/target/webhcat-java-client-%{version}.jar %{buildroot}%{_javadir}/%{name}/webhcat-java-client.jar
+install -pm 0644 webhcat/java-client/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-webhcat-java-client.pom
+%add_maven_depmap JPP.%{name}-webhcat-java-client.pom %{name}/webhcat-java-client.jar -f "hcatalog"
+
+install -pm 0644 webhcat/svr/target/webhcat-%{version}.jar %{buildroot}%{_javadir}/%{name}/webhcat.jar
+install -pm 0644 webhcat/svr/pom.xml %{buildroot}%{_mavenpomdir}/JPP.%{name}-webhcat.pom
+%add_maven_depmap JPP.%{name}-webhcat.pom %{name}/webhcat.jar -f "hcatalog"
+
+popd
+
+# create the root from here
+install -d -m 0755 %{buildroot}%{_datadir}/%{name}
+install -d -m 0755 %{buildroot}%{_datadir}/%{name}/bin
+install -d -m 0755 %{buildroot}%{_datadir}/%{name}/bin/ext
+install -d -m 0755 %{buildroot}%{_datadir}/%{name}/bin/ext/util
+install -d -m 0755 %{buildroot}%{_datadir}/%{name}/lib
+install -d -m 0755 %{buildroot}%{_datadir}/%{name}/conf
+install -d -m 0755 %{buildroot}%{_sysconfdir}/%{name}
+
+# bin
+install -dm 0755 %{buildroot}%{_bindir}
+for f in %{name} hive-config.sh init-hive-dfs.sh metatool schematool; do
+ install -p bin/${f} %{buildroot}%{_datadir}/%{name}/bin
+ ln -s %{_datadir}/%{name}/bin/${f} %{buildroot}%{_bindir}/${f}
+done
+
+# bin/ext/util
+cp -pr bin/ext/* %{buildroot}%{_datadir}/%{name}/bin/ext
+
+# don't have these just yet...
+for f in beeline.sh hiveserver2.sh hwi.sh; do
+ rm %{buildroot}%{_datadir}/%{name}/bin/ext/${f}
+done
+
+# conf
+for f in hive-default.xml hive-env.sh hive-exec-log4j.properties hive-log4j.properties; do
+ cp -p build/dist/conf/${f}.template %{buildroot}%{_datadir}/%{name}/conf/${f}
+ ln -s %{_datadir}/%{name}/conf/${f} %{buildroot}%{_sysconfdir}/%{name}/${f}
+done
+
+# lib
+rsync -aP build/dist/lib/*.jar build/ivy/lib/hadoop0.%{hadoop_generation}.shim/*.jar \
+ %{buildroot}%{_datadir}/%{name}/lib
+# xmvn-subst won't link JDK tools.jar
+rm %{buildroot}%{_datadir}/%{name}/lib/tools-*.jar
+%{_bindir}/xmvn-subst %{buildroot}%{_datadir}/%{name}/lib
+
+# manually replace the hive jars with their links
+rm %{buildroot}%{_datadir}/%{name}/lib/hive-*.jar
+for m in cli common exec metastore serde service shims; do
+ ln -s %{_javadir}/%{name}/%{name}-${m}.jar %{buildroot}%{_datadir}/%{name}/lib/%{name}-${m}-%{version}.jar
+done
+
+# MR needs the shims and thrift jars in its classpath
+mkdir -p -m0755 %{buildroot}/%{_datadir}/hadoop/mapreduce/lib
+ln -s %{_javadir}/%{name}/%{name}-shims.jar %{buildroot}%{_datadir}/hadoop/mapreduce/lib/%{name}-shims.jar
+ln -s `xmvn-resolve org.apache.thrift:libthrift` %{buildroot}%{_datadir}/hadoop/mapreduce/lib/%{name}-libthrift.jar
+
+%check
+# tests are not run due to dependency on hive hbase support
+# which is currently unavailable
+
+%files -f .mfiles
+%doc LICENSE NOTICE README.txt
+%{_bindir}/*
+%{_datadir}/%{name}
+%{_sysconfdir}/%{name}
+%dir %{_javadir}/%{name}
+%{_datadir}/hadoop/mapreduce/lib/%{name}-shims.jar
+%{_datadir}/hadoop/mapreduce/lib/%{name}-libthrift.jar
+
+%files hcatalog -f hcatalog/.mfiles-hcatalog
+
+%files javadoc -f .mfiles-javadoc
+%doc LICENSE NOTICE
+
+%changelog
+* Thu Feb 27 2014 Peter MacKinnon <pmackinn at redhat.com> 0.12.0-1
+- Initial rpm
+- Add bin and env scripts
+- Remove ivy download
+- Add missing maven plugins
+- Review improvements
+
diff --git a/sources b/sources
index e69de29..4a7e1e2 100644
--- a/sources
+++ b/sources
@@ -0,0 +1 @@
+af5fb945680420dd907bfa341048e5f4 release-0.12.0.tar.gz
More information about the scm-commits
mailing list