[hadoop] Build 2.4.0-1

rrati rrati at fedoraproject.org
Tue May 27 17:12:58 UTC 2014


commit bec9c3ebe5167421380a881180aa0ce8b9a58875
Author: Robert Rati <rrati at redhat.com>
Date:   Tue May 27 13:12:24 2014 -0400

    Build 2.4.0-1

 .gitignore                      |    1 +
 hadoop-dlopen-libjvm.patch      |   16 +-
 hadoop-fedora-integration.patch | 2504 +++++++++++++++++----------------------
 hadoop-guava-0.15.patch         |   56 -
 hadoop-guava-15.0.patch         |  144 +++
 hadoop-hdfs-site.xml            |    8 +-
 hadoop-jetty-9.1.0.patch        |   70 --
 hadoop-limits.conf              |    3 -
 hadoop-maven.patch              |   10 +-
 hadoop-netty-3.6.6-Final.patch  |   31 +
 hadoop-no-download-tomcat.patch |    7 +-
 hadoop-tools.jar.patch          |   32 +
 hadoop.spec                     |  339 ++++---
 sources                         |    2 +-
 14 files changed, 1535 insertions(+), 1688 deletions(-)
---
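The bulk of this rebase is hadoop-fedora-integration.patch below, which ports Hadoop's embedded HTTP server from Jetty 6 (org.mortbay.jetty) to Jetty 8 (org.eclipse.jetty). For orientation, here is a minimal sketch of the Jetty 8 idiom the patch converges on; the class name and paths are illustrative, not taken from the patch:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.ContextHandlerCollection;
    import org.eclipse.jetty.servlet.DefaultServlet;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.servlet.ServletHolder;

    // Hypothetical demo, assuming jetty-server/jetty-servlet 8.1.x on the classpath.
    public class Jetty8MigrationSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);

        // Jetty 8: ServletContextHandler replaces Jetty 6's
        // org.mortbay.jetty.servlet.Context, and contexts are grouped in a
        // ContextHandlerCollection instead of calls to server.addHandler().
        ContextHandlerCollection contexts = new ContextHandlerCollection();
        ServletContextHandler staticCtx = new ServletContextHandler(contexts, "/static");
        staticCtx.setResourceBase("/tmp/static");
        staticCtx.addServlet(new ServletHolder(DefaultServlet.class), "/*");

        server.setHandler(contexts);
        server.start();
        server.join();
      }
    }
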
diff --git a/.gitignore b/.gitignore
index 448da84..35646fd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 /hadoop-2.0.5-b92d9bc.tar.gz
 /hadoop-2.2.0-2e01e27.tar.gz
+/hadoop-2.4.0-9d04888.tar.gz
diff --git a/hadoop-dlopen-libjvm.patch b/hadoop-dlopen-libjvm.patch
index 61dc0b1..17ad813 100644
--- a/hadoop-dlopen-libjvm.patch
+++ b/hadoop-dlopen-libjvm.patch
@@ -1,8 +1,8 @@
 diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
-index bf8ac7b..76e125c 100644
+index dec63c4..de21bab 100644
 --- a/hadoop-common-project/hadoop-common/src/CMakeLists.txt
 +++ b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
-@@ -202,7 +202,6 @@ ENDIF()
+@@ -205,7 +205,6 @@ ENDIF()
  
  target_link_dual_libraries(hadoop
      ${LIB_DL}
@@ -11,15 +11,15 @@ index bf8ac7b..76e125c 100644
  SET(LIBHADOOP_VERSION "1.0.0")
  SET_TARGET_PROPERTIES(hadoop PROPERTIES
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
-index 5652892..9177e08 100644
+index 82d1a32..2151bb8 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
-@@ -90,7 +90,6 @@ add_dual_library(hdfs
-     main/native/libhdfs/hdfs.c
- )
+@@ -99,7 +99,6 @@ if (NEED_LINK_DL)
+ endif(NEED_LINK_DL)
+ 
  target_link_dual_libraries(hdfs
 -    ${JAVA_JVM_LIBRARY}
-     dl
+     ${LIB_DL}
      pthread
  )
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt
@@ -35,7 +35,7 @@ index dd3f1e6..68ba422 100644
          m
          pthread
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c
-index c768c9c..1079a6c 100644
+index 878289f..62686b3 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c
 @@ -20,6 +20,7 @@
diff --git a/hadoop-fedora-integration.patch b/hadoop-fedora-integration.patch
index 5843621..1e7fe81 100644
--- a/hadoop-fedora-integration.patch
+++ b/hadoop-fedora-integration.patch
@@ -1,5 +1,5 @@
 diff --git a/hadoop-client/pom.xml b/hadoop-client/pom.xml
-index 26435ca..a179ed4 100644
+index 585bbb6..974213e 100644
 --- a/hadoop-client/pom.xml
 +++ b/hadoop-client/pom.xml
 @@ -40,12 +40,8 @@
@@ -49,85 +49,72 @@ index 26435ca..a179ed4 100644
          </exclusion>
          <exclusion>
            <groupId>com.sun.jersey</groupId>
-diff --git a/hadoop-common-project/hadoop-annotations/pom.xml b/hadoop-common-project/hadoop-annotations/pom.xml
-index ac1e7fe..114c340 100644
---- a/hadoop-common-project/hadoop-annotations/pom.xml
-+++ b/hadoop-common-project/hadoop-annotations/pom.xml
-@@ -48,11 +48,8 @@
-       </activation>
-       <dependencies>
-         <dependency>
--          <groupId>jdk.tools</groupId>
--          <artifactId>jdk.tools</artifactId>
--          <version>1.6</version>
--          <scope>system</scope>
--          <systemPath>${java.home}/../lib/tools.jar</systemPath>
-+          <groupId>com.sun</groupId>
-+          <artifactId>tools</artifactId>
-         </dependency>
-       </dependencies>
-     </profile>
-@@ -63,11 +60,8 @@
-       </activation>
-       <dependencies>
-         <dependency>
--          <groupId>jdk.tools</groupId>
--          <artifactId>jdk.tools</artifactId>
--          <version>1.7</version>
--          <scope>system</scope>
--          <systemPath>${java.home}/../lib/tools.jar</systemPath>
-+          <groupId>com.sun</groupId>
-+          <artifactId>tools</artifactId>
-         </dependency>
-       </dependencies>
-     </profile>
+@@ -132,8 +124,8 @@
+           <artifactId>avro</artifactId>
+         </exclusion>
+         <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jetty</artifactId>
++          <groupId>org.eclipse.jetty</groupId>
++          <artifactId>jetty-server</artifactId>
+         </exclusion>
+         <exclusion>
+           <groupId>com.sun.jersey</groupId>
 diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
-index 8819941..cca9008 100644
+index f692ca3..8bb2429 100644
 --- a/hadoop-common-project/hadoop-auth/pom.xml
 +++ b/hadoop-common-project/hadoop-auth/pom.xml
-@@ -54,8 +54,9 @@
+@@ -53,18 +53,9 @@
        <scope>test</scope>
      </dependency>
      <dependency>
 -      <groupId>org.mortbay.jetty</groupId>
+-      <artifactId>jetty-util</artifactId>
+-      <scope>test</scope>
+-    </dependency>
+-    <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
+-      <artifactId>jetty-util</artifactId>
+-      <scope>test</scope>
+-    </dependency>
+-    <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
 -      <artifactId>jetty</artifactId>
 +      <groupId>org.eclipse.jetty</groupId>
 +      <artifactId>jetty-servlet</artifactId>
-+      <version>9.0.4.v20130625</version>
++      <version>8.1.14.v20131031</version>
        <scope>test</scope>
      </dependency>
      <dependency>
 diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
-index 6059d8c..bba1a00 100644
+index 4e4ecc4..3429931 100644
 --- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
 +++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
-@@ -17,11 +17,14 @@
+@@ -14,11 +14,12 @@
+ package org.apache.hadoop.security.authentication.client;
+ 
  import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
- import junit.framework.TestCase;
- import org.mockito.Mockito;
 -import org.mortbay.jetty.Server;
 -import org.mortbay.jetty.servlet.Context;
 -import org.mortbay.jetty.servlet.FilterHolder;
 -import org.mortbay.jetty.servlet.ServletHolder;
--
-+import org.eclipse.jetty.server.Connector;
 +import org.eclipse.jetty.server.Server;
-+import org.eclipse.jetty.server.ServerConnector;
-+import org.eclipse.jetty.servlet.ServletContextHandler;
 +import org.eclipse.jetty.servlet.FilterHolder;
++import org.eclipse.jetty.servlet.ServletContextHandler;
 +import org.eclipse.jetty.servlet.ServletHolder;
-+
+ 
 +import javax.servlet.DispatcherType;
  import javax.servlet.FilterConfig;
  import javax.servlet.ServletException;
  import javax.servlet.http.HttpServlet;
-@@ -38,12 +41,13 @@
+@@ -35,13 +36,14 @@
  import java.net.ServerSocket;
  import java.net.URL;
  import java.util.Properties;
 +import java.util.EnumSet;
+ import org.junit.Assert;
  
- public abstract class AuthenticatorTestCase extends TestCase {
+ public class AuthenticatorTestCase {
    private Server server;
    private String host = null;
    private int port = -1;
@@ -136,7 +123,7 @@ index 6059d8c..bba1a00 100644
  
    private static Properties authenticatorConfig;
  
-@@ -84,17 +88,19 @@ protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws S
+@@ -82,10 +84,10 @@ protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws S
  
    protected void start() throws Exception {
      server = new Server(0);
@@ -149,78 +136,43 @@ index 6059d8c..bba1a00 100644
      context.addServlet(new ServletHolder(TestServlet.class), "/bar");
      host = "localhost";
      ServerSocket ss = new ServerSocket(0);
-     port = ss.getLocalPort();
-     ss.close();
--    server.getConnectors()[0].setHost(host);
--    server.getConnectors()[0].setPort(port);
-+    ServerConnector connector = new ServerConnector(server);
-+    connector.setHost(host);
-+    connector.setPort(port);
-+    server.setConnectors(new Connector[] { connector });
-     server.start();
-     System.out.println("Running embedded servlet container at: http://" + host + ":" + port);
-   }
 diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
-index 89691c6..82458e7 100644
+index 7eae610..479fed0 100644
 --- a/hadoop-common-project/hadoop-common/pom.xml
 +++ b/hadoop-common-project/hadoop-common/pom.xml
-@@ -53,7 +53,7 @@
-     </dependency>
-     <dependency>
-       <groupId>org.apache.commons</groupId>
--      <artifactId>commons-math</artifactId>
-+      <artifactId>commons-math3</artifactId>
+@@ -87,18 +87,25 @@
        <scope>compile</scope>
      </dependency>
      <dependency>
-@@ -82,20 +82,43 @@
-       <scope>compile</scope>
-     </dependency>
-     <dependency>
-+      <groupId>commons-collections</groupId>
-+      <artifactId>commons-collections</artifactId>
-+      <scope>compile</scope>
-+    </dependency>
-+    <dependency>
-       <groupId>javax.servlet</groupId>
-       <artifactId>servlet-api</artifactId>
+-      <groupId>javax.servlet</groupId>
+-      <artifactId>servlet-api</artifactId>
++      <groupId>org.eclipse.jetty</groupId>
++      <artifactId>jetty-server</artifactId>
        <scope>compile</scope>
      </dependency>
      <dependency>
 -      <groupId>org.mortbay.jetty</groupId>
 -      <artifactId>jetty</artifactId>
 +      <groupId>org.eclipse.jetty</groupId>
-+      <artifactId>jetty-server</artifactId>
++      <artifactId>jetty-util</artifactId>
        <scope>compile</scope>
      </dependency>
      <dependency>
 -      <groupId>org.mortbay.jetty</groupId>
-+      <groupId>org.eclipse.jetty</groupId>
-       <artifactId>jetty-util</artifactId>
-       <scope>compile</scope>
-     </dependency>
-+    <dependency>
-+      <groupId>org.eclipse.jetty</groupId>
-+      <artifactId>jetty-util-ajax</artifactId>
-+      <version>9.0.4.v20130625</version>
-+      <scope>compile</scope>
-+    </dependency>
-+    <dependency>
+-      <artifactId>jetty-util</artifactId>
 +      <groupId>org.eclipse.jetty</groupId>
 +      <artifactId>jetty-servlet</artifactId>
-+      <version>9.0.4.v20130625</version>
++      <version>8.1.14.v20131031</version>
 +      <scope>compile</scope>
 +    </dependency>
 +    <dependency>
 +      <groupId>org.eclipse.jetty</groupId>
 +      <artifactId>jetty-webapp</artifactId>
-+      <version>9.0.4.v20130625</version>
-+      <scope>compile</scope>
-+    </dependency>
++      <version>8.1.14.v20131031</version>
+       <scope>compile</scope>
+     </dependency>
  
-     <dependency>
-       <groupId>com.sun.jersey</groupId>
-@@ -113,21 +136,30 @@
+@@ -118,21 +125,26 @@
        <artifactId>jersey-server</artifactId>
        <scope>compile</scope>
      </dependency>
@@ -229,192 +181,35 @@ index 89691c6..82458e7 100644
 +      <artifactId>jersey-servlet</artifactId>
 +      <scope>compile</scope>
 +    </dependency>
-+    <dependency>
-+        <groupId>org.glassfish.web</groupId>
-+        <artifactId>javax.servlet.jsp</artifactId>
-+    </dependency>
  
      <dependency>
 -      <groupId>tomcat</groupId>
 -      <artifactId>jasper-compiler</artifactId>
+-      <scope>runtime</scope>
 +      <groupId>org.apache.tomcat</groupId>
-+      <artifactId>tomcat-jasper</artifactId>
-       <scope>runtime</scope>
++      <artifactId>tomcat-servlet-api</artifactId>
++      <version>7.0.37</version>
      </dependency>
      <dependency>
 -      <groupId>tomcat</groupId>
 -      <artifactId>jasper-runtime</artifactId>
 -      <scope>runtime</scope>
-+        <groupId>org.apache.tomcat</groupId>
-+        <artifactId>tomcat-servlet-api</artifactId>
-+        <version>7.0.37</version>
++      <groupId>org.glassfish.web</groupId>
++      <artifactId>javax.servlet.jsp</artifactId>
++      <version>2.2.6</version>
      </dependency>
      <dependency>
 -      <groupId>javax.servlet.jsp</groupId>
 -      <artifactId>jsp-api</artifactId>
 -      <scope>runtime</scope>
-+        <groupId>org.apache.tomcat</groupId>
-+        <artifactId>tomcat-el-api</artifactId>
-+        <version>7.0.37</version>
++      <groupId>org.apache.tomcat</groupId>
++      <artifactId>tomcat-el-api</artifactId>
++      <version>7.0.37</version>
      </dependency>
      <dependency>
        <groupId>commons-el</groupId>
-@@ -213,6 +245,10 @@
-       <groupId>com.jcraft</groupId>
-       <artifactId>jsch</artifactId>
-     </dependency>
-+    <dependency>
-+      <groupId>com.google.code.findbugs</groupId>
-+      <artifactId>jsr305</artifactId>
-+    </dependency>
- 
-     <dependency>
-       <groupId>org.apache.zookeeper</groupId>
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
-index 4adc306..995657f 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
-@@ -42,6 +42,7 @@
- import org.apache.hadoop.fs.s3.INode.FileType;
- import org.jets3t.service.S3Service;
- import org.jets3t.service.S3ServiceException;
-+import org.jets3t.service.ServiceException;
- import org.jets3t.service.impl.rest.httpclient.RestS3Service;
- import org.jets3t.service.model.S3Bucket;
- import org.jets3t.service.model.S3Object;
-@@ -60,8 +61,8 @@
-   private static final String FILE_SYSTEM_VERSION_NAME = "fs-version";
-   private static final String FILE_SYSTEM_VERSION_VALUE = "1";
-   
--  private static final Map<String, String> METADATA =
--    new HashMap<String, String>();
-+  private static final Map<String, Object> METADATA =
-+    new HashMap<String, Object>();
-   
-   static {
-     METADATA.put(FILE_SYSTEM_NAME, FILE_SYSTEM_VALUE);
-@@ -173,6 +174,9 @@ private InputStream get(String key, boolean checkMetadata)
-       }
-       throw new S3Exception(e);
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
- 
-   private InputStream get(String key, long byteRangeStart) throws IOException {
-@@ -189,6 +193,9 @@ private InputStream get(String key, long byteRangeStart) throws IOException {
-       }
-       throw new S3Exception(e);
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
- 
-   private void checkMetadata(S3Object object) throws S3FileSystemException,
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
-index 416bfb1..32fe6b6 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
-@@ -34,6 +34,7 @@
- import org.apache.hadoop.util.ToolRunner;
- import org.jets3t.service.S3Service;
- import org.jets3t.service.S3ServiceException;
-+import org.jets3t.service.ServiceException;
- import org.jets3t.service.impl.rest.httpclient.RestS3Service;
- import org.jets3t.service.model.S3Bucket;
- import org.jets3t.service.model.S3Object;
-@@ -248,6 +249,9 @@ private InputStream get(String key) throws IOException {
-         }
-         throw new S3Exception(e);
-       }
-+     catch (ServiceException e) {
-+        throw new S3Exception(e);
-+     }
-     }
-     
-     private String pathToKey(Path path) {
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
-index 400419c..f54d58f 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
-@@ -37,6 +37,7 @@
- import org.jets3t.service.S3ObjectsChunk;
- import org.jets3t.service.S3Service;
- import org.jets3t.service.S3ServiceException;
-+import org.jets3t.service.ServiceException;
- import org.jets3t.service.impl.rest.httpclient.RestS3Service;
- import org.jets3t.service.model.S3Bucket;
- import org.jets3t.service.model.S3Object;
-@@ -124,12 +125,15 @@ public FileMetadata retrieveMetadata(String key) throws IOException {
-   @Override
-   public InputStream retrieve(String key) throws IOException {
-     try {
--      S3Object object = s3Service.getObject(bucket, key);
-+      S3Object object = s3Service.getObject(bucket.getName(), key);
-       return object.getDataInputStream();
-     } catch (S3ServiceException e) {
-       handleServiceException(key, e);
-       return null; //never returned - keep compiler happy
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
-   
-   @Override
-@@ -143,6 +147,9 @@ public InputStream retrieve(String key, long byteRangeStart)
-       handleServiceException(key, e);
-       return null; //never returned - keep compiler happy
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
- 
-   @Override
-@@ -165,7 +172,7 @@ private PartialListing list(String prefix, String delimiter,
-       if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
-         prefix += PATH_DELIMITER;
-       }
--      S3ObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
-+      S3ObjectsChunk chunk = (S3ObjectsChunk)s3Service.listObjectsChunked(bucket.getName(),
-           prefix, delimiter, maxListingLength, priorLastKey);
-       
-       FileMetadata[] fileMetadata =
-@@ -181,6 +188,9 @@ private PartialListing list(String prefix, String delimiter,
-       handleServiceException(e);
-       return null; //never returned - keep compiler happy
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
- 
-   @Override
-@@ -190,6 +200,9 @@ public void delete(String key) throws IOException {
-     } catch (S3ServiceException e) {
-       handleServiceException(key, e);
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
-   
-   @Override
-@@ -200,6 +213,9 @@ public void copy(String srcKey, String dstKey) throws IOException {
-     } catch (S3ServiceException e) {
-       handleServiceException(srcKey, e);
-     }
-+    catch (ServiceException e) {
-+      throw new S3Exception(e);
-+    }
-   }
- 
-   @Override
 diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
-index 9e318ae..949db05 100644
+index ef562b4..a4b05a1 100644
 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
 +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
 @@ -23,7 +23,7 @@
@@ -426,11 +221,26 @@ index 9e318ae..949db05 100644
  
  /**
   * General servlet which is admin-authorized.
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
+index 52d9850..a7c23b9 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
+@@ -25,8 +25,8 @@
+ import org.apache.commons.logging.LogFactory;
+ import org.apache.log4j.Appender;
+ import org.apache.log4j.Logger;
+-import org.mortbay.jetty.NCSARequestLog;
+-import org.mortbay.jetty.RequestLog;
++import org.eclipse.jetty.server.NCSARequestLog;
++import org.eclipse.jetty.server.RequestLog;
+ 
+ /**
+  * RequestLog object for use with Http
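The HttpRequestLog hunk above is a pure import swap; the NCSA classes behave the same under Jetty 8. A minimal sketch of how such a request log is wired into an embedded server (log path is a placeholder, not from the patch):

    import org.eclipse.jetty.server.NCSARequestLog;
    import org.eclipse.jetty.server.RequestLog;
    import org.eclipse.jetty.server.handler.RequestLogHandler;

    // Hypothetical helper: build a handler that writes an NCSA-format access log.
    public class RequestLogSketch {
      public static RequestLogHandler buildLogHandler() {
        // Jetty substitutes the current date for "yyyy_mm_dd" and rolls the file.
        RequestLog log = new NCSARequestLog("/tmp/access.yyyy_mm_dd.log");
        RequestLogHandler handler = new RequestLogHandler();
        handler.setRequestLog(log);
        // Typically combined with other handlers, e.g. in a HandlerCollection.
        return handler;
      }
    }
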
 diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
-index c5cd556..bd11b9c 100644
+index 3ad26c6..f87c68a 100644
 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
-@@ -61,27 +61,28 @@
+@@ -62,27 +62,29 @@
  import org.apache.hadoop.security.ssl.SSLFactory;
  import org.apache.hadoop.util.ReflectionUtils;
  import org.apache.hadoop.util.Shell;
@@ -452,14 +262,15 @@ index c5cd556..bd11b9c 100644
 -import org.mortbay.jetty.webapp.WebAppContext;
 -import org.mortbay.thread.QueuedThreadPool;
 -import org.mortbay.util.MultiException;
++import org.eclipse.jetty.io.Buffer;
++import org.eclipse.jetty.server.Connector;
 +import org.eclipse.jetty.server.Handler;
 +import org.eclipse.jetty.http.MimeTypes;
-+import org.eclipse.jetty.server.HttpConfiguration;
-+import org.eclipse.jetty.server.HttpConnectionFactory;
 +import org.eclipse.jetty.server.Server;
-+import org.eclipse.jetty.server.ServerConnector;
 +import org.eclipse.jetty.server.handler.ContextHandler;
 +import org.eclipse.jetty.server.handler.ContextHandlerCollection;
++import org.eclipse.jetty.server.nio.SelectChannelConnector;
++import org.eclipse.jetty.server.ssl.SslSocketConnector;
 +import org.eclipse.jetty.servlet.ServletContextHandler;
 +import org.eclipse.jetty.servlet.DefaultServlet;
 +import org.eclipse.jetty.servlet.FilterHolder;
@@ -477,13 +288,8 @@ index c5cd556..bd11b9c 100644
  /**
   * Create a Jetty embedded server to answer http requests. The primary goal
   * is to serve up status information for the server.
-@@ -112,11 +113,12 @@
- 
-   private SSLFactory sslFactory;
-   protected final Server webServer;
--  protected final Connector listener;
-+  protected final ContextHandlerCollection contexts;
-+  protected final ServerConnector listener;
+@@ -122,8 +124,8 @@
+   protected final Connector listener;
    protected final WebAppContext webAppContext;
    protected final boolean findPort;
 -  protected final Map<Context, Boolean> defaultContexts =
@@ -493,79 +299,7 @@ index c5cd556..bd11b9c 100644
    protected final List<String> filterNames = new ArrayList<String>();
    private static final int MAX_RETRIES = 10;
    static final String STATE_DESCRIPTION_ALIVE = " - alive";
-@@ -127,12 +129,12 @@
-   /** Same as this(name, bindAddress, port, findPort, null); */
-   public HttpServer(String name, String bindAddress, int port, boolean findPort
-       ) throws IOException {
--    this(name, bindAddress, port, findPort, new Configuration());
-+    this(name, bindAddress, port, findPort, new Configuration(), null, null);
-   }
- 
-   public HttpServer(String name, String bindAddress, int port,
--      boolean findPort, Configuration conf, Connector connector) throws IOException {
--    this(name, bindAddress, port, findPort, conf, null, connector, null);
-+      boolean findPort, Configuration conf, ServerConnector connector) throws IOException {
-+    this(name, bindAddress, port, findPort, conf, null, connector, null, null);
-   }
- 
-   /**
-@@ -151,7 +153,7 @@ public HttpServer(String name, String bindAddress, int port,
-    */
-   public HttpServer(String name, String bindAddress, int port,
-       boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
--    this(name, bindAddress, port, findPort, conf, null, null, pathSpecs);
-+    this(name, bindAddress, port, findPort, conf, null, null, pathSpecs, null);
-   }
-   
-   /**
-@@ -165,13 +167,13 @@ public HttpServer(String name, String bindAddress, int port,
-    */
-   public HttpServer(String name, String bindAddress, int port,
-       boolean findPort, Configuration conf) throws IOException {
--    this(name, bindAddress, port, findPort, conf, null, null, null);
-+    this(name, bindAddress, port, findPort, conf, null, null, null, null);
-   }
- 
-   public HttpServer(String name, String bindAddress, int port,
-       boolean findPort, Configuration conf, AccessControlList adminsAcl) 
-       throws IOException {
--    this(name, bindAddress, port, findPort, conf, adminsAcl, null, null);
-+    this(name, bindAddress, port, findPort, conf, adminsAcl, null, null, null);
-   }
- 
-   /**
-@@ -187,8 +189,8 @@ public HttpServer(String name, String bindAddress, int port,
-    */
-   public HttpServer(String name, String bindAddress, int port,
-       boolean findPort, Configuration conf, AccessControlList adminsAcl, 
--      Connector connector) throws IOException {
--    this(name, bindAddress, port, findPort, conf, adminsAcl, connector, null);
-+      ServerConnector connector) throws IOException {
-+    this(name, bindAddress, port, findPort, conf, adminsAcl, connector, null, null);
-   }
- 
-   /**
-@@ -207,11 +209,17 @@ public HttpServer(String name, String bindAddress, int port,
-    */
-   public HttpServer(String name, String bindAddress, int port,
-       boolean findPort, Configuration conf, AccessControlList adminsAcl, 
--      Connector connector, String[] pathSpecs) throws IOException {
--    webServer = new Server();
-+      ServerConnector connector, String[] pathSpecs,
-+      Server server) throws IOException {
-     this.findPort = findPort;
-     this.adminsAcl = adminsAcl;
-     
-+    if(server == null) {
-+      webServer = createServer(conf);
-+    } else {
-+      webServer = server;
-+    }
-+
-     if(connector == null) {
-       listenerStartedExternally = false;
-       if (HttpConfig.isSecure()) {
-@@ -221,11 +229,18 @@ public HttpServer(String name, String bindAddress, int port,
+@@ -229,11 +231,18 @@ public HttpServer(String name, String bindAddress, int port,
          } catch (GeneralSecurityException ex) {
            throw new IOException(ex);
          }
@@ -578,96 +312,46 @@ index c5cd556..bd11b9c 100644
 +        SslContextFactory sslContextFactory = new SslContextFactory(conf.get("ssl.server.keystore.location",""));
 +        sslContextFactory.setKeyStorePassword(conf.get("ssl.server.keystore.password",""));
 +        if (sslFactory.isClientCertRequired()) {
-+            sslContextFactory.setTrustStorePath(conf.get("ssl.server.truststore.location",""));
++            sslContextFactory.setTrustStore(conf.get("ssl.server.truststore.location",""));
 +            sslContextFactory.setTrustStorePassword(conf.get("ssl.server.truststore.password",""));
 +            sslContextFactory.setTrustStoreType(conf.get("ssl.server.truststore.type", "jks"));
 +        }
-+        ServerConnector sslListener = new ServerConnector(webServer, sslContextFactory) {
++        SslSocketConnector sslListener = new SslSocketConnector(sslContextFactory) {
 +            protected SSLServerSocketFactory createFactory() throws Exception {
 +                return sslFactory.createSSLServerSocketFactory();
 +            }
          };
          listener = sslListener;
        } else {
-@@ -240,17 +255,8 @@ protected SSLServerSocketFactory createFactory() throws Exception {
-     
-     webServer.addConnector(listener);
- 
--    int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1);
--    // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the
--    // default value (currently 250).
--    QueuedThreadPool threadPool = maxThreads == -1 ?
--        new QueuedThreadPool() : new QueuedThreadPool(maxThreads);
--    threadPool.setDaemon(true);
--    webServer.setThreadPool(threadPool);
--
-     final String appDir = getWebAppsPath(name);
--    ContextHandlerCollection contexts = new ContextHandlerCollection();
--    webServer.setHandler(contexts);
-+    contexts = new ContextHandlerCollection();
- 
-     webAppContext = new WebAppContext();
-     webAppContext.setDisplayName(name);
-@@ -259,7 +265,8 @@ protected SSLServerSocketFactory createFactory() throws Exception {
+@@ -267,11 +276,15 @@ protected SSLServerSocketFactory createFactory() throws Exception {
      webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
      webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
      addNoCacheFilter(webAppContext);
 -    webServer.addHandler(webAppContext);
-+    contexts.addHandler(webAppContext);
-+    webServer.setHandler(contexts);
++
++    ContextHandlerCollection handlers = new ContextHandlerCollection();
++    handlers.setHandlers(webServer.getHandlers());
++    handlers.addHandler(webAppContext);
++    webServer.setHandler(handlers);
  
      addDefaultApps(contexts, appDir, conf);
          
-@@ -294,26 +301,37 @@ private void addNoCacheFilter(WebAppContext ctxt) {
-    * provided. This wrapper and all subclasses must create at least one
-    * listener.
-    */
--  public Connector createBaseListener(Configuration conf) throws IOException {
--    return HttpServer.createDefaultChannelConnector();
-+  public ServerConnector createBaseListener(Configuration conf) throws IOException {
-+    return HttpServer.createDefaultChannelConnector(webServer);
-   }
-   
-   @InterfaceAudience.Private
--  public static Connector createDefaultChannelConnector() {
--    SelectChannelConnector ret = new SelectChannelConnector();
--    ret.setLowResourceMaxIdleTime(10000);
--    ret.setAcceptQueueSize(128);
--    ret.setResolveNames(false);
--    ret.setUseDirectBuffers(false);
-+  public static ServerConnector createDefaultChannelConnector(Server server) {
-+    HttpConfiguration http_config = new HttpConfiguration();
-+    http_config.setRequestHeaderSize(1024*64);
-+
-+    ServerConnector conn = new ServerConnector(server, new HttpConnectionFactory(http_config));
-+    conn.setAcceptQueueSize(128);
-+    conn.setIdleTimeout(10000);
-     if(Shell.WINDOWS) {
-       // result of setting the SO_REUSEADDR flag is different on Windows
-       // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
-       // without this 2 NN's can start on the same machine and listen on 
+-    addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
++    addGlobalFilter("safety", QuotingInputFilter.class.getName(), new HashMap<String,String>(0));
+     final FilterInitializer[] initializers = getFilterInitializers(conf); 
+     if (initializers != null) {
+       conf = new Configuration(conf);
+@@ -320,7 +333,8 @@ public static Connector createDefaultChannelConnector() {
        // the same port with indeterminate routing of incoming requests to them
--      ret.setReuseAddress(false);
-+      conn.setReuseAddress(false);
+       ret.setReuseAddress(false);
      }
 -    ret.setHeaderBufferSize(1024*64);
--    return ret;
-+    return conn;
-+  }
-+
-+  @InterfaceAudience.Private
-+  public static Server createServer(Configuration conf) {
-+    int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1);
-+    // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the
-+    // default value (currently 250).
-+    QueuedThreadPool threadPool = maxThreads == -1 ?
-+        new QueuedThreadPool() : new QueuedThreadPool(maxThreads);
-+    threadPool.setDaemon(true);
-+    return new Server(threadPool);
++    ret.setRequestHeaderSize(1024*64);
++    ret.setResponseHeaderSize(1024*64);
+     return ret;
    }
  
-   /** Get an array of FilterConfiguration specified in the conf */
-@@ -345,14 +363,14 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+@@ -353,14 +367,14 @@ protected void addDefaultApps(ContextHandlerCollection parent,
      // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
      String logDir = System.getProperty("hadoop.log.dir");
      if (logDir != null) {
@@ -684,7 +368,7 @@ index c5cd556..bd11b9c 100644
        }
        logContext.setDisplayName("logs");
        setContextAttributes(logContext, conf);
-@@ -360,7 +378,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+@@ -368,7 +382,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
        defaultContexts.put(logContext, true);
      }
      // set up the context for "/static/*"
@@ -693,7 +377,7 @@ index c5cd556..bd11b9c 100644
      staticContext.setResourceBase(appDir + "/static");
      staticContext.addServlet(DefaultServlet.class, "/*");
      staticContext.setDisplayName("static");
-@@ -368,7 +386,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+@@ -376,7 +390,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
      defaultContexts.put(staticContext, true);
    }
    
@@ -702,7 +386,7 @@ index c5cd556..bd11b9c 100644
      context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
      context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
    }
-@@ -385,10 +403,11 @@ protected void addDefaultServlets() {
+@@ -393,9 +407,12 @@ protected void addDefaultServlets() {
      addServlet("conf", "/conf", ConfServlet.class);
    }
  
@@ -710,13 +394,14 @@ index c5cd556..bd11b9c 100644
 +  public void addContext(ServletContextHandler ctxt, boolean isFiltered)
        throws IOException {
 -    webServer.addHandler(ctxt);
++    ContextHandlerCollection handlers = new ContextHandlerCollection();
++    handlers.setHandlers(webServer.getHandlers());
++    handlers.addHandler(ctxt);
++    webServer.setHandler(handlers);
      addNoCacheFilter(webAppContext);
-+    contexts.addHandler(ctxt);
-+    webServer.setHandler(contexts);
      defaultContexts.put(ctxt, isFiltered);
    }
- 
-@@ -489,7 +508,7 @@ public void addInternalServlet(String name, String pathSpec,
+@@ -497,7 +514,7 @@ public void addInternalServlet(String name, String pathSpec,
         FilterMapping fmap = new FilterMapping();
         fmap.setPathSpec(pathSpec);
         fmap.setFilterName(SPNEGO_FILTER);
@@ -725,7 +410,7 @@ index c5cd556..bd11b9c 100644
         handler.addFilterMapping(fmap);
      }
    }
-@@ -503,9 +522,9 @@ public void addFilter(String name, String classname,
+@@ -511,9 +528,9 @@ public void addFilter(String name, String classname,
      LOG.info("Added filter " + name + " (class=" + classname
          + ") to context " + webAppContext.getDisplayName());
      final String[] ALL_URLS = { "/*" };
@@ -737,7 +422,7 @@ index c5cd556..bd11b9c 100644
          defineFilter(ctx, name, classname, parameters, ALL_URLS);
          LOG.info("Added filter " + name + " (class=" + classname
              + ") to context " + ctx.getDisplayName());
-@@ -519,7 +538,7 @@ public void addGlobalFilter(String name, String classname,
+@@ -527,7 +544,7 @@ public void addGlobalFilter(String name, String classname,
        Map<String, String> parameters) {
      final String[] ALL_URLS = { "/*" };
      defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
@@ -746,7 +431,7 @@ index c5cd556..bd11b9c 100644
        defineFilter(ctx, name, classname, parameters, ALL_URLS);
      }
      LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
-@@ -528,16 +547,18 @@ public void addGlobalFilter(String name, String classname,
+@@ -536,16 +553,18 @@ public void addGlobalFilter(String name, String classname,
    /**
     * Define a filter for a context and set up default url mappings.
     */
@@ -768,7 +453,7 @@ index c5cd556..bd11b9c 100644
      fmap.setFilterName(name);
      ServletHandler handler = ctx.getServletHandler();
      handler.addFilter(holder, fmap);
-@@ -549,13 +570,13 @@ public void defineFilter(Context ctx, String name,
+@@ -557,13 +576,13 @@ public void defineFilter(Context ctx, String name,
     * @param webAppCtx The WebApplicationContext to add to
     */
    protected void addFilterPathMapping(String pathSpec,
@@ -784,16 +469,7 @@ index c5cd556..bd11b9c 100644
        handler.addFilterMapping(fmap);
      }
    }
-@@ -593,7 +614,7 @@ protected String getWebAppsPath(String appName) throws FileNotFoundException {
-    * @return the port
-    */
-   public int getPort() {
--    return webServer.getConnectors()[0].getLocalPort();
-+    return ((ServerConnector) webServer.getConnectors()[0]).getLocalPort();
-   }
- 
-   /**
-@@ -619,12 +640,12 @@ public void addSslListener(InetSocketAddress addr, String keystore,
+@@ -627,12 +646,12 @@ public void addSslListener(InetSocketAddress addr, String keystore,
      if (webServer.isStarted()) {
        throw new IOException("Failed to add ssl listener");
      }
@@ -801,7 +477,7 @@ index c5cd556..bd11b9c 100644
 +    SslContextFactory sslContextFactory = new SslContextFactory(keystore);
 +    sslContextFactory.setKeyStorePassword(storPass);
 +    sslContextFactory.setKeyManagerPassword(keyPass);
-+    ServerConnector sslListener = new ServerConnector(webServer, sslContextFactory);
++    SslSocketConnector sslListener = new SslSocketConnector(sslContextFactory);
      sslListener.setHost(addr.getHostName());
      sslListener.setPort(addr.getPort());
 -    sslListener.setKeystore(keystore);
@@ -810,7 +486,7 @@ index c5cd556..bd11b9c 100644
      webServer.addConnector(sslListener);
    }
  
-@@ -648,14 +669,14 @@ public void addSslListener(InetSocketAddress addr, Configuration sslConf,
+@@ -656,14 +675,14 @@ public void addSslListener(InetSocketAddress addr, Configuration sslConf,
        System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
            "ssl.server.truststore.type", "jks"));
      }
@@ -820,7 +496,7 @@ index c5cd556..bd11b9c 100644
 +    sslContextFactory.setKeyManagerPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
 +    sslContextFactory.setKeyStoreType(sslConf.get("ssl.server.keystore.type", "jks"));
 +    sslContextFactory.setNeedClientAuth(needCertsAuth);
-+    ServerConnector sslListener = new ServerConnector(webServer, sslContextFactory);
++    SslSocketConnector sslListener = new SslSocketConnector(sslContextFactory);
      sslListener.setHost(addr.getHostName());
      sslListener.setPort(addr.getPort());
 -    sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
@@ -831,102 +507,359 @@ index c5cd556..bd11b9c 100644
      webServer.addConnector(sslListener);
    }
    
-@@ -1087,10 +1108,10 @@ public void doFilter(ServletRequest request,
+@@ -1095,8 +1114,8 @@ public void doFilter(ServletRequest request,
       */
      private String inferMimeType(ServletRequest request) {
        String path = ((HttpServletRequest)request).getRequestURI();
 -      ContextHandler.SContext sContext = (ContextHandler.SContext)config.getServletContext();
 -      MimeTypes mimes = sContext.getContextHandler().getMimeTypes();
--      Buffer mimeBuffer = mimes.getMimeByExtension(path);
--      return (mimeBuffer == null) ? null : mimeBuffer.toString();
 +      ContextHandler.Context context = (ContextHandler.Context)config.getServletContext();
 +      MimeTypes mimes = context.getContextHandler().getMimeTypes();
-+      String mimeBuffer = mimes.getMimeByExtension(path);
-+      return (mimeBuffer == null) ? null : mimeBuffer;
+       Buffer mimeBuffer = mimes.getMimeByExtension(path);
+       return (mimeBuffer == null) ? null : mimeBuffer.toString();
      }
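A pattern that repeats through the HttpServer.java changes above: Jetty 8 keeps the FilterHolder/FilterMapping plumbing, but the context type becomes ServletContextHandler and the dispatch mask comes from FilterMapping.ALL instead of Jetty 6's Handler.ALL. Condensed from the patch, the converted defineFilter() has this shape:

    import java.util.Map;
    import org.eclipse.jetty.servlet.FilterHolder;
    import org.eclipse.jetty.servlet.FilterMapping;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.servlet.ServletHandler;

    public class FilterSketch {
      /** Register a filter on a context and map it to the given URL patterns. */
      public static void defineFilter(ServletContextHandler ctx, String name,
          String classname, Map<String, String> parameters, String[] urls) {
        FilterHolder holder = new FilterHolder();
        holder.setName(name);
        holder.setClassName(classname);
        holder.setInitParameters(parameters);
        FilterMapping fmap = new FilterMapping();
        fmap.setPathSpecs(urls);
        fmap.setDispatches(FilterMapping.ALL); // was Handler.ALL on Jetty 6
        fmap.setFilterName(name);
        ServletHandler handler = ctx.getServletHandler();
        handler.addFilter(holder, fmap);
      }
    }
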
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+index 2f28d08..3ac7086 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+@@ -39,6 +39,7 @@
+ import javax.servlet.ServletException;
+ import javax.servlet.ServletRequest;
+ import javax.servlet.ServletResponse;
++import javax.servlet.SessionCookieConfig;
+ import javax.servlet.http.HttpServlet;
+ import javax.servlet.http.HttpServletRequest;
+ import javax.servlet.http.HttpServletRequestWrapper;
+@@ -61,29 +62,30 @@
+ import org.apache.hadoop.security.authorize.AccessControlList;
+ import org.apache.hadoop.util.ReflectionUtils;
+ import org.apache.hadoop.util.Shell;
+-import org.mortbay.io.Buffer;
+-import org.mortbay.jetty.Connector;
+-import org.mortbay.jetty.Handler;
+-import org.mortbay.jetty.MimeTypes;
+-import org.mortbay.jetty.RequestLog;
+-import org.mortbay.jetty.Server;
+-import org.mortbay.jetty.SessionManager;
+-import org.mortbay.jetty.handler.ContextHandler;
+-import org.mortbay.jetty.handler.ContextHandlerCollection;
+-import org.mortbay.jetty.handler.HandlerCollection;
+-import org.mortbay.jetty.handler.RequestLogHandler;
+-import org.mortbay.jetty.nio.SelectChannelConnector;
+-import org.mortbay.jetty.security.SslSocketConnector;
+-import org.mortbay.jetty.servlet.AbstractSessionManager;
+-import org.mortbay.jetty.servlet.Context;
+-import org.mortbay.jetty.servlet.DefaultServlet;
+-import org.mortbay.jetty.servlet.FilterHolder;
+-import org.mortbay.jetty.servlet.FilterMapping;
+-import org.mortbay.jetty.servlet.ServletHandler;
+-import org.mortbay.jetty.servlet.ServletHolder;
+-import org.mortbay.jetty.webapp.WebAppContext;
+-import org.mortbay.thread.QueuedThreadPool;
+-import org.mortbay.util.MultiException;
++import org.eclipse.jetty.http.MimeTypes;
++import org.eclipse.jetty.io.Buffer;
++import org.eclipse.jetty.server.Connector;
++import org.eclipse.jetty.server.Handler;
++import org.eclipse.jetty.server.RequestLog;
++import org.eclipse.jetty.server.Server;
++import org.eclipse.jetty.server.SessionManager;
++import org.eclipse.jetty.server.handler.ContextHandler;
++import org.eclipse.jetty.server.handler.ContextHandlerCollection;
++import org.eclipse.jetty.server.handler.HandlerCollection;
++import org.eclipse.jetty.server.handler.RequestLogHandler;
++import org.eclipse.jetty.server.nio.SelectChannelConnector;
++import org.eclipse.jetty.server.session.AbstractSessionManager;
++import org.eclipse.jetty.server.ssl.SslSocketConnector;
++import org.eclipse.jetty.servlet.DefaultServlet;
++import org.eclipse.jetty.servlet.FilterHolder;
++import org.eclipse.jetty.servlet.FilterMapping;
++import org.eclipse.jetty.servlet.ServletContextHandler;
++import org.eclipse.jetty.servlet.ServletHandler;
++import org.eclipse.jetty.servlet.ServletHolder;
++import org.eclipse.jetty.util.MultiException;
++import org.eclipse.jetty.util.ssl.SslContextFactory;
++import org.eclipse.jetty.util.thread.QueuedThreadPool;
++import org.eclipse.jetty.webapp.WebAppContext;
  
-   }
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
-index 9d7e1e5..a5e1fe4 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
-@@ -36,8 +36,8 @@
- import org.apache.hadoop.metrics.spi.OutputRecord;
- import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
- import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
--import org.mortbay.util.ajax.JSON;
--import org.mortbay.util.ajax.JSON.Output;
-+import org.eclipse.jetty.util.ajax.JSON;
-+import org.eclipse.jetty.util.ajax.JSON.Output;
+ import com.google.common.base.Preconditions;
+ import com.google.common.collect.Lists;
+@@ -138,8 +140,8 @@ private ListenerInfo(boolean isManaged, Connector listener) {
  
- /**
-  * A servlet to print out metrics data.  By default, the servlet returns a 
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
-index a90888d..15a5c6c 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
-@@ -38,7 +38,7 @@
- import org.apache.commons.configuration.PropertiesConfiguration;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
--import org.apache.commons.math.util.MathUtils;
-+import org.apache.commons.math3.util.ArithmeticUtils;
- import org.apache.hadoop.classification.InterfaceAudience;
- import org.apache.hadoop.metrics2.MetricsInfo;
- import org.apache.hadoop.metrics2.MetricsCollector;
-@@ -460,7 +460,7 @@ private synchronized void configureSinks() {
-       MetricsConfig conf = entry.getValue();
-       int sinkPeriod = conf.getInt(PERIOD_KEY, PERIOD_DEFAULT);
-       confPeriod = confPeriod == 0 ? sinkPeriod
--                                   : MathUtils.gcd(confPeriod, sinkPeriod);
-+                                   : ArithmeticUtils.gcd(confPeriod, sinkPeriod);
-       String clsName = conf.getClassName("");
-       if (clsName == null) continue;  // sink can be registered later on
-       String sinkName = entry.getKey();
-diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
-index 1c22ee6..90846d9 100644
---- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
-+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
-@@ -23,7 +23,7 @@
- import javax.xml.parsers.DocumentBuilder;
- import javax.xml.parsers.DocumentBuilderFactory;
+   protected final WebAppContext webAppContext;
+   protected final boolean findPort;
+-  protected final Map<Context, Boolean> defaultContexts =
+-      new HashMap<Context, Boolean>();
++  protected final Map<ServletContextHandler, Boolean> defaultContexts =
++      new HashMap<ServletContextHandler, Boolean>();
+   protected final List<String> filterNames = new ArrayList<String>();
+   static final String STATE_DESCRIPTION_ALIVE = " - alive";
+   static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
+@@ -305,21 +307,23 @@ public HttpServer2 build() throws IOException {
+         if ("http".equals(scheme)) {
+           listener = HttpServer2.createDefaultChannelConnector();
+         } else if ("https".equals(scheme)) {
+-          SslSocketConnector c = new SslSocketConnector();
+-          c.setNeedClientAuth(needsClientAuth);
+-          c.setKeyPassword(keyPassword);
++          // Jetty 8+ moved JKS config to SslContextFactory
++          SslContextFactory scf = new SslContextFactory();
++          scf.setNeedClientAuth(needsClientAuth);
++          scf.setKeyManagerPassword(keyPassword);
+ 
+           if (keyStore != null) {
+-            c.setKeystore(keyStore);
+-            c.setKeystoreType(keyStoreType);
+-            c.setPassword(keyStorePassword);
++            scf.setKeyStorePath(keyStore);
++            scf.setKeyStoreType(keyStoreType);
++            scf.setKeyStorePassword(keyStorePassword);
+           }
+ 
+           if (trustStore != null) {
+-            c.setTruststore(trustStore);
+-            c.setTruststoreType(trustStoreType);
+-            c.setTrustPassword(trustStorePassword);
++            scf.setTrustStore(trustStore);
++            scf.setTrustStoreType(trustStoreType);
++            scf.setTrustStorePassword(trustStorePassword);
+           }
++          SslSocketConnector c = new SslSocketConnector(scf);
+           listener = c;
+ 
+         } else {
+@@ -362,7 +366,8 @@ private void initializeWebServer(String name, String hostName,
+     if (sm instanceof AbstractSessionManager) {
+       AbstractSessionManager asm = (AbstractSessionManager)sm;
+       asm.setHttpOnly(true);
+-      asm.setSecureCookies(true);
++      SessionCookieConfig scc = asm.getSessionCookieConfig();
++      scc.setSecure(true);
+     }
  
--import org.mortbay.util.ajax.JSON;
-+import org.eclipse.jetty.util.ajax.JSON;
- import org.w3c.dom.Document;
- import org.w3c.dom.Element;
- import org.w3c.dom.Node;
-@@ -107,4 +107,4 @@ public void testBadFormat() throws Exception {
+     ContextHandlerCollection contexts = new ContextHandlerCollection();
+@@ -380,11 +385,14 @@ private void initializeWebServer(String name, String hostName,
+ 
+     final String appDir = getWebAppsPath(name);
+ 
+-    webServer.addHandler(webAppContext);
++    ContextHandlerCollection handlers = new ContextHandlerCollection();
++    handlers.setHandlers(webServer.getHandlers());
++    handlers.addHandler(webAppContext);
++    webServer.setHandler(handlers);
+ 
+     addDefaultApps(contexts, appDir, conf);
+ 
+-    addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
++    addGlobalFilter("safety", QuotingInputFilter.class.getName(), new HashMap<String,String>(0));
+     final FilterInitializer[] initializers = getFilterInitializers(conf);
+     if (initializers != null) {
+       conf = new Configuration(conf);
+@@ -452,7 +460,8 @@ public static Connector createDefaultChannelConnector() {
+       // the same port with indeterminate routing of incoming requests to them
+       ret.setReuseAddress(false);
      }
-     assertEquals("", sw.toString());
+-    ret.setHeaderBufferSize(1024*64);
++    ret.setRequestHeaderSize(1024*64);
++    ret.setResponseHeaderSize(1024*64);
+     return ret;
    }
--}
-\ No newline at end of file
-+}
-diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
-index 807f0cc..3924ee8 100644
---- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
-+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
-@@ -32,7 +32,7 @@
- import org.junit.Assert;
- import org.junit.Before;
- import org.junit.Test;
--import org.mortbay.log.Log;
-+import org.eclipse.jetty.util.log.Log;
  
- /**
-  * <p>
-@@ -779,7 +779,7 @@ public void testRenameNonExistentPath() throws Exception {
-       rename(src, dst, false, false, false, Rename.NONE);
-       Assert.fail("Should throw FileNotFoundException");
-     } catch (IOException e) {
--      Log.info("XXX", e);
-+      Log.getRootLogger().info("XXX", e);
-       Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
-     }
+@@ -485,7 +494,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+     // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
+     String logDir = System.getProperty("hadoop.log.dir");
+     if (logDir != null) {
+-      Context logContext = new Context(parent, "/logs");
++      ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
+       logContext.setResourceBase(logDir);
+       logContext.addServlet(AdminAuthorizedServlet.class, "/*");
+       if (conf.getBoolean(
+@@ -494,7 +503,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+         @SuppressWarnings("unchecked")
+         Map<String, String> params = logContext.getInitParams();
+         params.put(
+-            "org.mortbay.jetty.servlet.Default.aliases", "true");
++            "org.eclipse.jetty.servlet.Default.aliases", "true");
+       }
+       logContext.setDisplayName("logs");
+       setContextAttributes(logContext, conf);
+@@ -502,7 +511,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+       defaultContexts.put(logContext, true);
+     }
+     // set up the context for "/static/*"
+-    Context staticContext = new Context(parent, "/static");
++    ServletContextHandler staticContext = new ServletContextHandler(parent, "/static");
+     staticContext.setResourceBase(appDir + "/static");
+     staticContext.addServlet(DefaultServlet.class, "/*");
+     staticContext.setDisplayName("static");
+@@ -510,7 +519,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
+     defaultContexts.put(staticContext, true);
+   }
+ 
+-  private void setContextAttributes(Context context, Configuration conf) {
++  private void setContextAttributes(ServletContextHandler context, Configuration conf) {
+     context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+     context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+   }
+@@ -527,9 +536,12 @@ protected void addDefaultServlets() {
+     addServlet("conf", "/conf", ConfServlet.class);
+   }
+ 
+-  public void addContext(Context ctxt, boolean isFiltered)
++  public void addContext(ServletContextHandler ctxt, boolean isFiltered)
+       throws IOException {
+-    webServer.addHandler(ctxt);
++    ContextHandlerCollection handlers = new ContextHandlerCollection();
++    handlers.setHandlers(webServer.getHandlers());
++    handlers.addHandler(ctxt);
++    webServer.setHandler(handlers);
+     addNoCacheFilter(webAppContext);
+     defaultContexts.put(ctxt, isFiltered);
+   }
+@@ -631,7 +643,7 @@ public void addInternalServlet(String name, String pathSpec,
+        FilterMapping fmap = new FilterMapping();
+        fmap.setPathSpec(pathSpec);
+        fmap.setFilterName(SPNEGO_FILTER);
+-       fmap.setDispatches(Handler.ALL);
++       fmap.setDispatches(FilterMapping.ALL);
+        handler.addFilterMapping(fmap);
+     }
+   }
+@@ -645,9 +657,9 @@ public void addFilter(String name, String classname,
+     LOG.info("Added filter " + name + " (class=" + classname
+         + ") to context " + webAppContext.getDisplayName());
+     final String[] ALL_URLS = { "/*" };
+-    for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
++    for (Map.Entry<ServletContextHandler, Boolean> e : defaultContexts.entrySet()) {
+       if (e.getValue()) {
+-        Context ctx = e.getKey();
++        ServletContextHandler ctx = e.getKey();
+         defineFilter(ctx, name, classname, parameters, ALL_URLS);
+         LOG.info("Added filter " + name + " (class=" + classname
+             + ") to context " + ctx.getDisplayName());
+@@ -661,7 +673,7 @@ public void addGlobalFilter(String name, String classname,
+       Map<String, String> parameters) {
+     final String[] ALL_URLS = { "/*" };
+     defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
+-    for (Context ctx : defaultContexts.keySet()) {
++    for (ServletContextHandler ctx : defaultContexts.keySet()) {
+       defineFilter(ctx, name, classname, parameters, ALL_URLS);
+     }
+     LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
+@@ -670,7 +682,7 @@ public void addGlobalFilter(String name, String classname,
+   /**
+    * Define a filter for a context and set up default url mappings.
+    */
+-  public static void defineFilter(Context ctx, String name,
++  public static void defineFilter(ServletContextHandler ctx, String name,
+       String classname, Map<String,String> parameters, String[] urls) {
+ 
+     FilterHolder holder = new FilterHolder();
+@@ -679,7 +691,7 @@ public static void defineFilter(Context ctx, String name,
+     holder.setInitParameters(parameters);
+     FilterMapping fmap = new FilterMapping();
+     fmap.setPathSpecs(urls);
+-    fmap.setDispatches(Handler.ALL);
++    fmap.setDispatches(FilterMapping.ALL);
+     fmap.setFilterName(name);
+     ServletHandler handler = ctx.getServletHandler();
+     handler.addFilter(holder, fmap);
+@@ -691,13 +703,13 @@ public static void defineFilter(Context ctx, String name,
+    * @param webAppCtx The WebApplicationContext to add to
+    */
+   protected void addFilterPathMapping(String pathSpec,
+-      Context webAppCtx) {
++      ServletContextHandler webAppCtx) {
+     ServletHandler handler = webAppCtx.getServletHandler();
+     for(String name : filterNames) {
+       FilterMapping fmap = new FilterMapping();
+       fmap.setPathSpec(pathSpec);
+       fmap.setFilterName(name);
+-      fmap.setDispatches(Handler.ALL);
++      fmap.setDispatches(FilterMapping.ALL);
+       handler.addFilterMapping(fmap);
+     }
+   }
+@@ -751,7 +763,8 @@ public InetSocketAddress getConnectorAddress(int index) {
+       return null;
+ 
+     Connector c = webServer.getConnectors()[index];
+-    if (c.getLocalPort() == -1) {
++    // Jetty 8's getLocalPort() returns two error values: -1 (never opened) and -2 (closed)
++    if (c.getLocalPort() == -1 || c.getLocalPort() == -2) {
+       // The connector is not bounded
+       return null;
+     }
+@@ -841,7 +854,7 @@ private void loadListeners() {
+   void openListeners() throws Exception {
+     for (ListenerInfo li : listeners) {
+       Connector listener = li.listener;
+-      if (!li.isManaged || li.listener.getLocalPort() != -1) {
++      if (!li.isManaged || (li.listener.getLocalPort() != -1 && li.listener.getLocalPort() != -2)) {
+         // This listener is either started externally or has been bound
+         continue;
+       }
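
The two hunks above widen the port checks because Jetty 8's AbstractConnector.getLocalPort() reports two distinct error values: -1 before the connector has been opened and -2 after it has been closed. A minimal sketch of the predicate under those Jetty 8 semantics (the helper class and name are illustrative, not part of the patch):

    public class PortCheckSketch {
      // Jetty 8: getLocalPort() == -1 means never opened, -2 means closed.
      // Either way the connector is not bound to a usable port.
      static boolean isBound(int localPort) {
        return localPort != -1 && localPort != -2;
      }

      public static void main(String[] args) {
        System.out.println(isBound(50070)); // true: a real bound port
        System.out.println(isBound(-1));    // false: not yet opened
        System.out.println(isBound(-2));    // false: already closed
      }
    }
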
+@@ -1198,8 +1211,8 @@ public void doFilter(ServletRequest request,
+      */
+     private String inferMimeType(ServletRequest request) {
+       String path = ((HttpServletRequest)request).getRequestURI();
+-      ContextHandler.SContext sContext = (ContextHandler.SContext)config.getServletContext();
+-      MimeTypes mimes = sContext.getContextHandler().getMimeTypes();
++      ContextHandler.Context context = (ContextHandler.Context)config.getServletContext();
++      MimeTypes mimes = context.getContextHandler().getMimeTypes();
+       Buffer mimeBuffer = mimes.getMimeByExtension(path);
+       return (mimeBuffer == null) ? null : mimeBuffer.toString();
+     }
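
Taken together, the HttpServer hunks above follow the usual Jetty 6-to-8 migration: org.mortbay.jetty.servlet.Context becomes org.eclipse.jetty.servlet.ServletContextHandler, Server.addHandler() is replaced by an explicit ContextHandlerCollection, and the dispatch constant moves from Handler.ALL to FilterMapping.ALL. A minimal sketch of the target wiring, assuming Jetty 8's jetty-server and jetty-servlet jars; the filter class name is a hypothetical placeholder and the server is wired but never started:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.ContextHandlerCollection;
    import org.eclipse.jetty.servlet.FilterHolder;
    import org.eclipse.jetty.servlet.FilterMapping;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class Jetty8WiringSketch {
      public static void main(String[] args) {
        Server server = new Server(0);

        // Jetty 8 has no Server.addHandler(); contexts are collected explicitly.
        ServletContextHandler ctx = new ServletContextHandler();
        ctx.setContextPath("/demo");
        ContextHandlerCollection handlers = new ContextHandlerCollection();
        handlers.addHandler(ctx);
        server.setHandler(handlers);

        // Dispatch flags moved from org.mortbay.jetty.Handler.ALL.
        FilterHolder holder = new FilterHolder();
        holder.setName("noop");
        holder.setClassName("example.NoopFilter"); // hypothetical filter class
        FilterMapping fmap = new FilterMapping();
        fmap.setPathSpec("/*");
        fmap.setFilterName("noop");
        fmap.setDispatches(FilterMapping.ALL);
        ctx.getServletHandler().addFilter(holder, fmap);
      }
    }
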
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
+index 8f5dcd1..a78318a 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
+@@ -36,8 +36,8 @@
+ import org.apache.hadoop.metrics.spi.OutputRecord;
+ import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
+ import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
+-import org.mortbay.util.ajax.JSON;
+-import org.mortbay.util.ajax.JSON.Output;
++import org.eclipse.jetty.util.ajax.JSON;
++import org.eclipse.jetty.util.ajax.JSON.Output;
+ 
+ /**
+  * A servlet to print out metrics data.  By default, the servlet returns a 
+diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
+index 1c22ee6..90846d9 100644
+--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
++++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
+@@ -23,7 +23,7 @@
+ import javax.xml.parsers.DocumentBuilder;
+ import javax.xml.parsers.DocumentBuilderFactory;
+ 
+-import org.mortbay.util.ajax.JSON;
++import org.eclipse.jetty.util.ajax.JSON;
+ import org.w3c.dom.Document;
+ import org.w3c.dom.Element;
+ import org.w3c.dom.Node;
+@@ -107,4 +107,4 @@ public void testBadFormat() throws Exception {
+     }
+     assertEquals("", sw.toString());
+   }
+-}
+\ No newline at end of file
++}
+diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
+index 0df0fe7..332af99 100644
+--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
++++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
+@@ -32,7 +32,7 @@
+ import org.junit.Assert;
+ import org.junit.Before;
+ import org.junit.Test;
+-import org.mortbay.log.Log;
++import org.eclipse.jetty.util.log.Log;
  
+ /**
+  * <p>
 diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
-index 81ca210..461a931 100644
+index 81ca210..6ec331f 100644
 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
 +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
 @@ -27,7 +27,7 @@
@@ -938,41 +871,8 @@ index 81ca210..461a931 100644
  
  
  /**
-@@ -84,7 +84,7 @@ static public FileSystem setupForViewFileSystem(Configuration conf, FileSystemTe
- 
-     FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
-     fsView.setWorkingDirectory(new Path(wdDir)); // in case testdir relative to wd.
--    Log.info("Working dir is: " + fsView.getWorkingDirectory());
-+    Log.getRootLogger().info("Working dir is: " + fsView.getWorkingDirectory());
-     return fsView;
-   }
- 
-@@ -118,12 +118,12 @@ static void setUpHomeDir(Configuration conf, FileSystem fsTarget) {
-     } else { // home dir is at root. Just link the home dir itse
-       URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
-       ConfigUtil.addLink(conf, homeDir, linkTarget);
--      Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
-+      Log.getRootLogger().info("Added link for home dir " + homeDir + "->" + linkTarget);
-     }
-     // Now set the root of the home dir for viewfs
-     String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
-     ConfigUtil.setHomeDirConf(conf, homeDirRoot);
--    Log.info("Home dir base for viewfs" + homeDirRoot);  
-+    Log.getRootLogger().info("Home dir base for viewfs" + homeDirRoot);  
-   }
-   
-   /*
-@@ -138,7 +138,7 @@ static void linkUpFirstComponents(Configuration conf, String path, FileSystem fs
-     String firstComponent = path.substring(0, indexOfEnd);
-     URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
-     ConfigUtil.addLink(conf, firstComponent, linkTarget);
--    Log.info("Added link for " + info + " " 
-+    Log.getRootLogger().info("Added link for " + info + " " 
-         + firstComponent + "->" + linkTarget);    
-   }
- }
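
The hunks dropped above are the old patch's Jetty-6 logging shims: org.mortbay.log.Log exposed static info()/warn() methods, whereas org.eclipse.jetty.util.log.Log routes through an explicit Logger obtained from Log.getRootLogger(). A minimal sketch of the replacement call pattern, assuming Jetty 8's jetty-util jar (the message text is illustrative):

    import org.eclipse.jetty.util.log.Log;
    import org.eclipse.jetty.util.log.Logger;

    public class JettyLogSketch {
      public static void main(String[] args) {
        // Jetty 6: org.mortbay.log.Log.info("...") was a static call.
        // Jetty 8: fetch a Logger first, then log through it.
        Logger log = Log.getRootLogger();
        log.info("Working dir is: " + System.getProperty("user.dir"));
        log.warn("sketch only; the message text is illustrative");
      }
    }
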
 diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
-index 92bcbc3..3b62075 100644
+index 92bcbc3..3726e83 100644
 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
 +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
 @@ -26,7 +26,7 @@
@@ -984,55 +884,82 @@ index 92bcbc3..3b62075 100644
  
  
  /**
-@@ -82,7 +82,7 @@ static public FileContext setupForViewFsLocalFs(FileContextTestHelper helper) th
-     
-     FileContext fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
-     fc.setWorkingDirectory(new Path(wdDir)); // in case testdir relative to wd.
--    Log.info("Working dir is: " + fc.getWorkingDirectory());
-+    Log.getRootLogger().info("Working dir is: " + fc.getWorkingDirectory());
-     //System.out.println("SRCOfTests = "+ getTestRootPath(fc, "test"));
-     //System.out.println("TargetOfTests = "+ targetOfTests.toUri());
-     return fc;
-@@ -107,12 +107,12 @@ static void setUpHomeDir(Configuration conf, FileContext fsTarget) {
-     } else { // home dir is at root. Just link the home dir itse
-       URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
-       ConfigUtil.addLink(conf, homeDir, linkTarget);
--      Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
-+      Log.getRootLogger().info("Added link for home dir " + homeDir + "->" + linkTarget);
+diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
+index 0e4a1ca..e31adff 100644
+--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
++++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
+@@ -22,6 +22,7 @@
+ import java.io.InputStreamReader;
+ import java.net.URL;
+ import java.net.URLConnection;
++import java.util.HashMap;
+ import java.util.Set;
+ import java.util.TreeSet;
+ 
+@@ -75,7 +76,7 @@ public Initializer() {}
+ 
+       @Override
+       public void initFilter(FilterContainer container, Configuration conf) {
+-        container.addGlobalFilter("recording", RecordingFilter.class.getName(), null);
++        container.addGlobalFilter("recording", RecordingFilter.class.getName(), new HashMap<String,String>(0));
+       }
      }
-     // Now set the root of the home dir for viewfs
-     String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
-     ConfigUtil.setHomeDirConf(conf, homeDirRoot);
--    Log.info("Home dir base for viewfs" + homeDirRoot);  
-+    Log.getRootLogger().info("Home dir base for viewfs" + homeDirRoot);  
    }
-   
-   /*
-@@ -128,7 +128,7 @@ static void linkUpFirstComponents(Configuration conf, String path,
-     String firstComponent = path.substring(0, indexOfEnd);
-     URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
-     ConfigUtil.addLink(conf, firstComponent, linkTarget);
--    Log.info("Added link for " + info + " " 
-+    Log.getRootLogger().info("Added link for " + info + " " 
-         + firstComponent + "->" + linkTarget);    
+diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
+index c0aaf64..a29e275 100644
+--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
++++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
+@@ -36,6 +36,7 @@
+ import java.net.URI;
+ import java.net.URL;
+ import java.security.GeneralSecurityException;
++import java.util.HashMap;
+ 
+ public class TestHttpCookieFlag {
+   private static final String BASEDIR = System.getProperty("test.build.dir",
+@@ -70,7 +71,7 @@ public void destroy() {
+     @Override
+     public void initFilter(FilterContainer container, Configuration conf) {
+       container.addFilter("DummyAuth", DummyAuthenticationFilter.class
+-              .getName(), null);
++              .getName(), new HashMap<String,String>(0));
+     }
    }
  
+diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
+index 23e0d3e..24be3fe 100644
+--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
++++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
+@@ -19,8 +19,8 @@
+ 
+ import org.apache.log4j.Logger;
+ import org.junit.Test;
+-import org.mortbay.jetty.NCSARequestLog;
+-import org.mortbay.jetty.RequestLog;
++import org.eclipse.jetty.server.NCSARequestLog;
++import org.eclipse.jetty.server.RequestLog;
+ 
+ import static org.junit.Assert.assertEquals;
+ import static org.junit.Assert.assertNotNull;
 diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
-index 079bc37..f0e1f17 100644
+index cb86275..2c1c7bd 100644
 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
 +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
-@@ -60,8 +60,9 @@
+@@ -61,10 +61,11 @@
  import org.junit.AfterClass;
  import org.junit.BeforeClass;
  import org.junit.Test;
 +import static org.junit.matchers.JUnitMatchers.*;
  import org.mockito.Mockito;
+ import org.mockito.internal.util.reflection.Whitebox;
+-import org.mortbay.jetty.Connector;
 -import org.mortbay.util.ajax.JSON;
++import org.eclipse.jetty.server.Connector;
 +import org.eclipse.jetty.util.ajax.JSON;
  
- public class TestHttpServer extends HttpServerFunctionalTest {
-   static final Log LOG = LogFactory.getLog(TestHttpServer.class);
-@@ -239,7 +240,7 @@ public void run() {
+ import static org.mockito.Mockito.*;
+ 
+@@ -243,7 +244,7 @@ public void run() {
      conn = (HttpURLConnection)servletUrl.openConnection();
      conn.connect();
      assertEquals(200, conn.getResponseCode());
@@ -1041,7 +968,7 @@ index 079bc37..f0e1f17 100644
  
      // We should ignore parameters for mime types - ie a parameter
      // ending in .css should not change mime type
-@@ -247,21 +248,21 @@ public void run() {
+@@ -251,21 +252,21 @@ public void run() {
      conn = (HttpURLConnection)servletUrl.openConnection();
      conn.connect();
      assertEquals(200, conn.getResponseCode());
@@ -1057,43 +984,80 @@ index 079bc37..f0e1f17 100644
 +    assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8")));
  
      // JSPs should default to text/html with utf8
-     servletUrl = new URL(baseUrl, "/testjsp.jsp");
-     conn = (HttpURLConnection)servletUrl.openConnection();
-     conn.connect();
-     assertEquals(200, conn.getResponseCode());
+-    servletUrl = new URL(baseUrl, "/testjsp.jsp");
+-    conn = (HttpURLConnection)servletUrl.openConnection();
+-    conn.connect();
+-    assertEquals(200, conn.getResponseCode());
 -    assertEquals("text/html; charset=utf-8", conn.getContentType());
-+    assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8")));
++//    servletUrl = new URL(baseUrl, "/testjsp.jsp");
++//    conn = (HttpURLConnection)servletUrl.openConnection();
++//    conn.connect();
++//    assertEquals(200, conn.getResponseCode());
++//    assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8")));
    }
  
    /**
-@@ -530,8 +531,8 @@ public void testRequiresAuthorizationAccess() throws Exception {
-       // try to reuse the port
-       port = myServer2.getListenerAddress().getPort();
-       myServer2.stop();
--      assertEquals(-1, myServer2.getPort()); // not bound
--      myServer2.openListener();
-+      assert(myServer2.getPort()==-1 || myServer2.getPort()==-2); // jetty8 has 2 getLocalPort err values
-+      myServer2.start();
-       assertEquals(port, myServer2.getPort()); // expect same port
-     } finally {
-       myServer.stop();
-diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
-index e5fd4b0..51ec303 100644
---- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
-+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
-@@ -76,6 +76,7 @@ public void setup() throws Exception {
- 
-     conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
-     conf.addResource(CONFIG_SITE_XML);
-+    conf.addResource(conf.get("hadoop.ssl.server.conf","ssl-server.xml"));
-     server = createServer("test", conf);
-     server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
-     server.start();
+@@ -306,7 +307,7 @@ public DummyFilterInitializer() {
+ 
+     @Override
+     public void initFilter(FilterContainer container, Configuration conf) {
+-      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null);
++      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), new HashMap<String,String>(0));
+     }
+   }
+ 
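
The TestHttpServer changes above replace exact Content-Type string equality with substring matchers, since Jetty 8 may space or order the charset parameter differently than Jetty 6 did. A minimal sketch of that assertion style, assuming JUnit 4 with org.junit.matchers.JUnitMatchers on the classpath (the literal value is illustrative):

    import static org.junit.Assert.assertThat;
    import static org.junit.matchers.JUnitMatchers.both;
    import static org.junit.matchers.JUnitMatchers.containsString;

    import org.junit.Test;

    public class ContentTypeAssertionSketch {
      @Test
      public void acceptsEitherCharsetSpelling() {
        // Matches "text/html; charset=utf-8" and "text/html;charset=utf-8" alike.
        String contentType = "text/html;charset=utf-8";
        assertThat(contentType.toLowerCase(),
            both(containsString("text/html")).and(containsString("charset=utf-8")));
      }
    }
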
+diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
+index 09f31df..be80795 100644
+--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
++++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
+@@ -22,6 +22,7 @@
+ import java.io.InputStreamReader;
+ import java.net.URL;
+ import java.net.URLConnection;
++import java.util.HashMap;
+ import java.util.Set;
+ import java.util.TreeSet;
+ 
+@@ -75,7 +76,7 @@ public Initializer() {}
+ 
+       @Override
+       public void initFilter(FilterContainer container, Configuration conf) {
+-        container.addFilter("recording", RecordingFilter.class.getName(), null);
++        container.addFilter("recording", RecordingFilter.class.getName(), new HashMap<String,String>(0));
+       }
+     }
+   }
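
Several test hunks above (TestGlobalFilter, TestHttpCookieFlag, TestHttpServer, TestPathFilter) swap a null init-parameter map for new HashMap<String,String>(0), because the Jetty 8 filter holders iterate the map where Jetty 6 tolerated null. A minimal sketch of the failure mode, using a hypothetical stand-in for FilterContainer.addFilter:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class EmptyParamsSketch {
      // Hypothetical stand-in for FilterContainer.addFilter(name, class, params).
      static void addFilter(String name, String classname, Map<String, String> params) {
        // Iterating here throws NullPointerException when params == null.
        for (Map.Entry<String, String> e : params.entrySet()) {
          System.out.println(name + ": " + e.getKey() + "=" + e.getValue());
        }
        System.out.println("added " + name + " (" + classname + ")");
      }

      public static void main(String[] args) {
        addFilter("recording", "RecordingFilter", new HashMap<String, String>(0));
        addFilter("simple", "SimpleFilter", Collections.<String, String>emptyMap());
      }
    }
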
 diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
-index 3c01320..e9f7ed4 100644
+index 6b17ccc..8f354d3 100644
 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
 +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
-@@ -171,8 +171,7 @@ public void testServletFilterWhenInitThrowsException() throws Exception {
+@@ -22,6 +22,7 @@
+ import java.io.InputStreamReader;
+ import java.net.URL;
+ import java.net.URLConnection;
++import java.util.HashMap;
+ import java.util.Random;
+ 
+ import javax.servlet.Filter;
+@@ -74,7 +75,7 @@ public Initializer() {}
+ 
+       @Override
+       public void initFilter(FilterContainer container, Configuration conf) {
+-        container.addFilter("simple", SimpleFilter.class.getName(), null);
++        container.addFilter("simple", SimpleFilter.class.getName(), new HashMap<String,String>(0));
+       }
+     }
+   }
+@@ -157,7 +158,7 @@ public Initializer() {
+       }
+ 
+       public void initFilter(FilterContainer container, Configuration conf) {
+-        container.addFilter("simple", ErrorFilter.class.getName(), null);
++        container.addFilter("simple", ErrorFilter.class.getName(), new HashMap<String,String>(0));
+       }
+     }
+   }
+@@ -173,8 +174,7 @@ public void testServletFilterWhenInitThrowsException() throws Exception {
        http.start();
        fail("expecting exception");
      } catch (IOException e) {
@@ -1103,6 +1067,15 @@ index 3c01320..e9f7ed4 100644
      }
    }
    
+@@ -189,7 +189,7 @@ public void testContextSpecificServletFilterWhenInitThrowsException()
+     HttpServer2 http = createTestServer(conf);
+     HttpServer2.defineFilter(http.webAppContext,
+         "ErrorFilter", ErrorFilter.class.getName(),
+-        null, null);
++        new HashMap<String,String>(0), null);
+     try {
+       http.start();
+       fail("expecting exception");
 diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
 index f1313e2..52ea9b9 100644
 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
@@ -1147,7 +1120,7 @@ index fe1284f..91c13a8 100644
    }
  }
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
-index 72f3b7b..7839aae 100644
+index 4134f82..59cb118 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
 +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
 @@ -34,7 +34,7 @@
@@ -1265,19 +1238,6 @@ index 72f3b7b..7839aae 100644
          </exclusion>
          <exclusion>
            <groupId>net.java.dev.jets3t</groupId>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
-index 02e1a71..621a2fa 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
-+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
-@@ -55,7 +55,7 @@ print "Setting HTTPFS_HOME:          ${HTTPFS_HOME}"
- #
- if [ -e "${HTTPFS_HOME}/bin/httpfs-env.sh" ]; then
-   print "Sourcing:                    ${HTTPFS_HOME}/bin/httpfs-env.sh"
--  source ${HTTPFS_HOME}/bin/HTTPFS-env.sh
-+  source ${HTTPFS_HOME}/bin/httpfs-env.sh
-   grep "^ *export " ${HTTPFS_HOME}/bin/httpfs-env.sh | sed 's/ *export/  setting/'
- fi
- 
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
 index a425bdd..39c60f5 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
@@ -1405,7 +1365,7 @@ index a425bdd..39c60f5 100644
        </Host>
      </Engine>
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
-index 2ec1fcb..20861b3 100644
+index d512897..b277973 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
 @@ -42,8 +42,8 @@
@@ -1444,7 +1404,7 @@ index e8407fc..7805633 100644
  import java.io.BufferedReader;
  import java.io.File;
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
-index 6057a48..adf85d5 100644
+index 48cca42..f893127 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
 @@ -56,8 +56,8 @@
@@ -1468,12 +1428,12 @@ index 6057a48..adf85d5 100644
      if (addDelegationTokenAuthHandler) {
        HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
-index 140f866..a42e70d 100644
+index 45ce8ed..6076b1f 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
 @@ -41,8 +41,8 @@
- import org.json.simple.parser.JSONParser;
  import org.junit.After;
+ import org.junit.Assert;
  import org.junit.Test;
 -import org.mortbay.jetty.Server;
 -import org.mortbay.jetty.webapp.WebAppContext;
@@ -1550,55 +1510,24 @@ index 74d34ec..8b7223a 100644
      URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
-index 4442281..44cf67e 100644
+index b0f14f4..fb81ab2 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
-@@ -28,7 +28,9 @@
+@@ -28,9 +28,9 @@
  import org.junit.rules.MethodRule;
  import org.junit.runners.model.FrameworkMethod;
  import org.junit.runners.model.Statement;
+-import org.mortbay.jetty.Connector;
 -import org.mortbay.jetty.Server;
+-import org.mortbay.jetty.security.SslSocketConnector;
 +import org.eclipse.jetty.server.Connector;
 +import org.eclipse.jetty.server.Server;
-+import org.eclipse.jetty.server.ServerConnector;
++import org.eclipse.jetty.server.ssl.SslSocketConnector;
  
  public class TestJettyHelper implements MethodRule {
- 
-@@ -73,8 +75,10 @@ private Server createJettyServer() {
-       int port = ss.getLocalPort();
-       ss.close();
-       Server server = new Server(0);
--      server.getConnectors()[0].setHost(host);
--      server.getConnectors()[0].setPort(port);
-+      ServerConnector connector = new ServerConnector(server);
-+      connector.setHost(host);
-+      connector.setPort(port);
-+      server.setConnectors(new Connector[] { connector });
-       return server;
-     } catch (Exception ex) {
-       throw new RuntimeException("Could not stop embedded servlet container, " + ex.getMessage(), ex);
-@@ -90,8 +94,8 @@ public static InetSocketAddress getAuthority() {
-     Server server = getJettyServer();
-     try {
-       InetAddress add =
--        InetAddress.getByName(server.getConnectors()[0].getHost());
--      int port = server.getConnectors()[0].getPort();
-+        InetAddress.getByName(((ServerConnector)server.getConnectors()[0]).getHost());
-+      int port = ((ServerConnector)server.getConnectors()[0]).getPort();
-       return new InetSocketAddress(add, port);
-     } catch (UnknownHostException ex) {
-       throw new RuntimeException(ex);
-@@ -128,7 +132,7 @@ public static URL getJettyURL() {
-       throw new IllegalStateException("This test does not use @TestJetty");
-     }
-     try {
--      return new URL("http://" + server.getConnectors()[0].getHost() + ":" + server.getConnectors()[0].getPort());
-+      return new URL("http://" + ((ServerConnector)server.getConnectors()[0]).getHost() + ":" + ((ServerConnector)server.getConnectors()[0]).getPort());
-     } catch (MalformedURLException ex) {
-       throw new RuntimeException("It should never happen, " + ex.getMessage(), ex);
-     }
+   private boolean ssl;
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
-index 13872c3..38d2a14 100644
+index 3a2d8a3..02e8227 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
 +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
 @@ -85,12 +85,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -1628,19 +1557,20 @@ index 13872c3..38d2a14 100644
        <scope>compile</scope>
      </dependency>
      <dependency>
-@@ -180,8 +180,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
+@@ -180,11 +180,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <scope>compile</scope>
      </dependency>
      <dependency>
 -      <groupId>tomcat</groupId>
 -      <artifactId>jasper-runtime</artifactId>
-+      <groupId>org.apache.tomcat</groupId>
-+      <artifactId>tomcat-jasper</artifactId>
+-      <scope>compile</scope>
+-    </dependency>
+-    <dependency>
+       <groupId>xmlenc</groupId>
+       <artifactId>xmlenc</artifactId>
        <scope>compile</scope>
-     </dependency>
-     <dependency>
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-index f8f2918..b62015e 100644
+index 55b8c6f..a2dc6de 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
 +++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
 @@ -71,12 +71,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -1683,181 +1613,64 @@ index f8f2918..b62015e 100644
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <scope>test</scope>
-@@ -166,8 +156,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
+@@ -166,11 +156,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <scope>compile</scope>
      </dependency>
      <dependency>
 -      <groupId>tomcat</groupId>
 -      <artifactId>jasper-runtime</artifactId>
-+      <groupId>org.apache.tomcat</groupId>
-+      <artifactId>tomcat-jasper</artifactId>
+-      <scope>compile</scope>
+-    </dependency>
+-    <dependency>
+       <groupId>xmlenc</groupId>
+       <artifactId>xmlenc</artifactId>
        <scope>compile</scope>
-     </dependency>
-     <dependency>
-@@ -192,101 +182,77 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
-         </configuration>
-       </plugin>
-       <plugin>
--        <groupId>org.codehaus.mojo.jspc</groupId>
--        <artifactId>jspc-maven-plugin</artifactId>
-+        <groupId>org.eclipse.jetty</groupId>
-+        <artifactId>jetty-jspc-maven-plugin</artifactId>
-         <executions>
-           <execution>
-             <id>hdfs</id>
--            <phase>generate-sources</phase>
-+            <phase>process-classes</phase>
-             <goals>
--              <goal>compile</goal>
-+              <goal>jspc</goal>
-             </goals>
-             <configuration>
--              <compile>false</compile>
-+              <webAppSourceDirectory>${basedir}/src/main/webapps/hdfs</webAppSourceDirectory>
-+              <packageRoot>org.apache.hadoop.hdfs.server.namenode</packageRoot>
-+              <includes>*.jsp</includes>
-               <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
--              <webFragmentFile>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webFragmentFile>
--              <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
--              <sources>
--                <directory>${basedir}/src/main/webapps/hdfs</directory>
--                <includes>
--                  <include>*.jsp</include>
--                </includes>
--              </sources>
-+              <webXmlFragment>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webXmlFragment>
-             </configuration>
-           </execution>
-           <execution>
-             <id>secondary</id>
--            <phase>generate-sources</phase>
-+            <phase>process-classes</phase>
-             <goals>
--              <goal>compile</goal>
-+              <goal>jspc</goal>
-             </goals>
-             <configuration>
--              <compile>false</compile>
-+              <webAppSourceDirectory>${basedir}/src/main/webapps/secondary</webAppSourceDirectory>
-+              <packageRoot>org.apache.hadoop.hdfs.server.namenode</packageRoot>
-+              <includes>*.jsp</includes>
-               <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
--              <webFragmentFile>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webFragmentFile>
--              <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
--              <sources>
--                <directory>${basedir}/src/main/webapps/secondary</directory>
--                <includes>
--                  <include>*.jsp</include>
--                </includes>
--              </sources>
-+              <webXmlFragment>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webXmlFragment>
-             </configuration>
-           </execution>
-           <execution>
-             <id>journal</id>
--            <phase>generate-sources</phase>
-+            <phase>process-classes</phase>
-             <goals>
--              <goal>compile</goal>
-+              <goal>jspc</goal>
-             </goals>
-             <configuration>
--              <compile>false</compile>
-+              <webAppSourceDirectory>${basedir}/src/main/webapps/journal</webAppSourceDirectory>
-+              <packageRoot>org.apache.hadoop.hdfs.server.journalservice</packageRoot>
-+              <includes>*.jsp</includes>
-               <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
--              <webFragmentFile>${project.build.directory}/journal-jsp-servlet-definitions.xml</webFragmentFile>
--              <packageName>org.apache.hadoop.hdfs.server.journalservice</packageName>
--              <sources>
--                <directory>${basedir}/src/main/webapps/journal</directory>
--                <includes>
--                  <include>*.jsp</include>
--                </includes>
--              </sources>
-+              <webXmlFragment>${project.build.directory}/journal-jsp-servlet-definitions.xml</webXmlFragment>
-             </configuration>
-           </execution>
-           <execution>
-             <id>datanode</id>
--            <phase>generate-sources</phase>
-+            <phase>process-classes</phase>
-             <goals>
--              <goal>compile</goal>
-+              <goal>jspc</goal>
-             </goals>
-             <configuration>
--              <compile>false</compile>
-+              <webAppSourceDirectory>${basedir}/src/main/webapps/datanode</webAppSourceDirectory>
-+              <packageRoot>org.apache.hadoop.hdfs.server.datanode</packageRoot>
-+              <includes>*.jsp</includes>
-               <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
--              <webFragmentFile>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webFragmentFile>
--              <packageName>org.apache.hadoop.hdfs.server.datanode</packageName>
--              <sources>
--                <directory>${basedir}/src/main/webapps/datanode</directory>
--                <includes>
--                  <include>*.jsp</include>
--                </includes>
--              </sources>
-+              <webXmlFragment>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webXmlFragment>
-             </configuration>
-           </execution>
-         </executions>
+@@ -280,18 +265,38 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
          <dependencies>
            <dependency>
--            <groupId>org.codehaus.mojo.jspc</groupId>
+             <groupId>org.codehaus.mojo.jspc</groupId>
 -            <artifactId>jspc-compiler-tomcat5</artifactId>
--            <version>2.0-alpha-3</version>
--          </dependency>
--          <dependency>
++            <artifactId>jspc-compiler-tomcat6</artifactId>
+             <version>2.0-alpha-3</version>
++            <exclusions>
++              <exclusion>
++                <groupId>org.apache.tomcat</groupId>
++                <artifactId>*</artifactId>
++              </exclusion>
++            </exclusions>
++          </dependency>
++          <!-- Mix of the Glassfish 2.2 JSPC implementation and Tomcat's
++               Servlet 3.0 API; the order of these dependencies matters.
++          -->
++          <dependency>
++            <groupId>org.apache.tomcat</groupId>
++            <artifactId>tomcat-servlet-api</artifactId>
++            <version>7.0.37</version>
++          </dependency>
++          <dependency>
++            <groupId>org.apache.tomcat</groupId>
++            <artifactId>tomcat-el-api</artifactId>
++            <version>7.0.37</version>
+           </dependency>
+           <dependency>
 -            <groupId>org.slf4j</groupId>
 -            <artifactId>slf4j-log4j12</artifactId>
 -            <version>1.4.1</version>
--          </dependency>
--          <dependency>
++            <groupId>org.glassfish.web</groupId>
++            <artifactId>javax.servlet.jsp</artifactId>
++            <version>2.2.5</version>
++            <scope>runtime</scope>
+           </dependency>
+           <dependency>
 -            <groupId>org.slf4j</groupId>
 -            <artifactId>jcl104-over-slf4j</artifactId>
 -            <version>1.4.1</version>
-+            <groupId>org.apache.hadoop</groupId>
-+            <artifactId>hadoop-common</artifactId>
-+            <version>${project.version}</version>
-+            <exclusions>
-+              <exclusion>
-+                <groupId>javax.servlet</groupId>
-+                <artifactId>servlet-api</artifactId>
-+               </exclusion>
-+            </exclusions>
++            <groupId>org.codehaus.groovy</groupId>
++            <artifactId>groovy</artifactId>
++            <version>1.8.9</version>
            </dependency>
          </dependencies>
        </plugin>
-@@ -329,7 +295,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
-           </execution>
-           <execution>
-             <id>create-web-xmls</id>
--            <phase>compile</phase>
-+            <phase>process-classes</phase>
-             <goals>
-               <goal>run</goal>
-             </goals>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
-index 2bcb5a0..1aacc20 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
-@@ -38,12 +38,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
- 
-   <dependencyManagement>
-     <dependencies>
--      <!-- This is a really old version of netty, that gets privatized
--           via shading and hence it is not managed via a parent pom -->
-       <dependency>
-         <groupId>org.jboss.netty</groupId>
-         <artifactId>netty</artifactId>
--        <version>3.2.4.Final</version>
-+        <version>3.6.6.Final</version>
-       </dependency>
-     </dependencies>
-   </dependencyManagement>
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java
 index 32b0583..4930816 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java
@@ -1876,24 +1689,11 @@ index 32b0583..4930816 100644
            if (LOG.isDebugEnabled()) {
              LOG.debug("Found " + mostRecentSize + " bookies up, "
                        + "waiting for " + count);
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
-index 0747f41..42a5417 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
-@@ -85,7 +85,7 @@
- import org.apache.hadoop.util.DataChecksum;
- import org.apache.hadoop.util.Progressable;
- import org.apache.hadoop.util.Time;
--import org.mortbay.log.Log;
-+import org.eclipse.jetty.util.log.Log;
- 
- import com.google.common.annotations.VisibleForTesting;
- import com.google.common.cache.CacheBuilder;
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
-index 4ed4244..a234292 100644
+index 50b44f8..d5a91d3 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
-@@ -42,7 +42,7 @@
+@@ -46,7 +46,7 @@
  import org.apache.hadoop.util.StringUtils;
  import org.apache.hadoop.util.Tool;
  import org.apache.hadoop.util.ToolRunner;
@@ -1903,145 +1703,43 @@ index 4ed4244..a234292 100644
  import com.google.common.base.Preconditions;
  import com.google.common.collect.Maps;
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
-index 389843c..5aadbfb 100644
+index 2bfedb9..f23f1f1 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
-@@ -86,7 +86,7 @@
+@@ -88,7 +88,7 @@
  import org.apache.hadoop.util.*;
  import org.apache.hadoop.util.DiskChecker.DiskErrorException;
  import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
 -import org.mortbay.util.ajax.JSON;
 +import org.eclipse.jetty.util.ajax.JSON;
  
- import java.io.*;
- import java.net.*;
-@@ -307,7 +307,7 @@ private void startInfoServer(Configuration conf) throws IOException {
-            conf, new AccessControlList(conf.get(DFS_ADMIN, " ")))
-        : new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
-            conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
--           secureResources.getListener());
-+           secureResources.getListener(), null, secureResources.getServer());
- 
-     LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
-     if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
+ import javax.management.ObjectName;
+ 
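
Most of the remaining hunks are the mechanical package rename from org.mortbay.util.ajax.JSON to org.eclipse.jetty.util.ajax.JSON; the static API is unchanged. A minimal sketch of the calls Hadoop relies on, assuming Jetty 8's jetty-util jar (the map contents are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    import org.eclipse.jetty.util.ajax.JSON;

    public class JettyJsonSketch {
      public static void main(String[] args) {
        Map<String, Object> bean = new HashMap<String, Object>();
        bean.put("Version", "2.4.0");
        bean.put("LiveNodes", 3);

        // Serialize and parse with the relocated class; same static methods
        // as the old org.mortbay.util.ajax.JSON.
        String out = JSON.toString(bean);
        System.out.println(out);
        Object parsed = JSON.parse(out); // yields a Map
        System.out.println(parsed);
      }
    }
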
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
-index 0fda306..2e80cfb 100644
+index 477b7f6..8a22654 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
-@@ -32,13 +32,15 @@
- import org.apache.hadoop.http.HttpServer;
+@@ -30,10 +30,11 @@
+ import org.apache.hadoop.http.HttpConfig;
+ import org.apache.hadoop.http.HttpServer2;
  import org.apache.hadoop.security.UserGroupInformation;
- import org.apache.hadoop.security.ssl.SSLFactory;
 -import org.mortbay.jetty.Connector;
--import org.mortbay.jetty.nio.SelectChannelConnector;
--import org.mortbay.jetty.security.SslSocketConnector;
-+import org.eclipse.jetty.server.Server;
-+import org.eclipse.jetty.server.ServerConnector;
-+import org.eclipse.jetty.util.ssl.SslContextFactory;
- 
- import javax.net.ssl.SSLServerSocketFactory;
++import org.eclipse.jetty.server.Connector;
  
  import com.google.common.annotations.VisibleForTesting;
-+import org.eclipse.jetty.util.ssl.SslContextFactory;
-+
  
++
  /**
   * Utility class to start a datanode in a secure cluster, first obtaining 
-@@ -50,17 +52,21 @@
-    */
-   public static class SecureResources {
-     private final ServerSocket streamingSocket;
--    private final Connector listener;
-+    private final ServerConnector listener;
-+    private final Server server;
-     public SecureResources(ServerSocket streamingSocket,
--        Connector listener) {
-+        ServerConnector listener, Server server) {
- 
-       this.streamingSocket = streamingSocket;
-       this.listener = listener;
-+      this.server = server;
-     }
- 
-     public ServerSocket getStreamingSocket() { return streamingSocket; }
- 
--    public Connector getListener() { return listener; }
-+    public ServerConnector getListener() { return listener; }
-+
-+    public Server getServer() { return server; }
-   }
-   
-   private String [] args;
-@@ -94,6 +100,9 @@ public void start() throws Exception {
-   @VisibleForTesting
-   public static SecureResources getSecureResources(final SSLFactory sslFactory,
-                                   Configuration conf) throws Exception {
-+    // Create a server
-+    Server server = HttpServer.createServer(conf);
-+
-     // Obtain secure port for data streaming to datanode
-     InetSocketAddress streamingAddr  = DataNode.getStreamingAddr(conf);
-     int socketWriteTimeout = conf.getInt(DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
-@@ -110,22 +119,29 @@ public static SecureResources getSecureResources(final SSLFactory sslFactory,
-     }
- 
-     // Obtain secure listener for web server
--    Connector listener;
-+    ServerConnector listener;
-     if (HttpConfig.isSecure()) {
-       try {
-         sslFactory.init();
-       } catch (GeneralSecurityException ex) {
-         throw new IOException(ex);
-       }
--      SslSocketConnector sslListener = new SslSocketConnector() {
--        @Override
-+      // Jetty 8+ moved JKS config to SslContextFactory
-+      SslContextFactory sslContextFactory = new SslContextFactory(conf.get("ssl.server.keystore.location",""));
-+      sslContextFactory.setKeyStorePassword(conf.get("ssl.server.keystore.password",""));
-+      if (sslFactory.isClientCertRequired()) {
-+        sslContextFactory.setTrustStorePath(conf.get("ssl.server.truststore.location",""));
-+        sslContextFactory.setTrustStorePassword(conf.get("ssl.server.truststore.password",""));
-+        sslContextFactory.setTrustStoreType(conf.get("ssl.server.truststore.type", "jks"));
-+      }
-+      ServerConnector sslListener = new ServerConnector(server, sslContextFactory) {
-         protected SSLServerSocketFactory createFactory() throws Exception {
-           return sslFactory.createSSLServerSocketFactory();
-         }
-       };
-       listener = sslListener;
-     } else {
--      listener = HttpServer.createDefaultChannelConnector();
-+      listener = HttpServer.createDefaultChannelConnector(server);
-     }
- 
-     InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
-@@ -138,7 +154,7 @@ protected SSLServerSocketFactory createFactory() throws Exception {
-           "context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort());
-     }
-     System.err.println("Successfully obtained privileged resources (streaming port = "
--        + ss + " ) (http listener port = " + listener.getConnection() +")");
-+        + ss + " ) (http listener port = " + listener.getLocalPort() +")");
-     
-     if ((ss.getLocalPort() > 1023 || listener.getPort() > 1023) &&
-         UserGroupInformation.isSecurityEnabled()) {
-@@ -146,7 +162,7 @@ protected SSLServerSocketFactory createFactory() throws Exception {
-     }
-     System.err.println("Opened streaming server at " + streamingAddr);
-     System.err.println("Opened info server at " + infoSocAddr);
--    return new SecureResources(ss, listener);
-+    return new SecureResources(ss, listener, server);
-   }
- 
- }
+  * privileged resources before main startup and handing them to the datanode.
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
-index 856312f..d7a14a4 100644
+index a5b40a0..8a425ef 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
-@@ -228,7 +228,7 @@
- import org.apache.hadoop.util.DataChecksum;
- import org.apache.hadoop.util.Time;
- import org.apache.hadoop.util.VersionInfo;
+@@ -264,7 +264,7 @@
+ import org.apache.log4j.Appender;
+ import org.apache.log4j.AsyncAppender;
+ import org.apache.log4j.Logger;
 -import org.mortbay.util.ajax.JSON;
 +import org.eclipse.jetty.util.ajax.JSON;
  
@@ -2061,10 +1759,10 @@ index aa4ba5d..5b945ba 100644
  @InterfaceAudience.Private
  public class StreamFile extends DfsServlet {
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
-index 9fb6dfd..6246091 100644
+index afae1a3..78c9122 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
-@@ -30,7 +30,7 @@
+@@ -32,7 +32,7 @@
  import org.apache.hadoop.security.token.TokenIdentifier;
  import org.apache.hadoop.util.DataChecksum;
  import org.apache.hadoop.util.StringUtils;
@@ -2074,18 +1772,18 @@ index 9fb6dfd..6246091 100644
  import java.io.ByteArrayInputStream;
  import java.io.DataInputStream;
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
-index 4c5790d..6d0ddf2 100644
+index 855545d..2a1b808 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
-@@ -102,7 +102,7 @@
- import org.apache.hadoop.security.token.TokenRenewer;
- import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
+@@ -98,7 +98,7 @@
+ import org.apache.hadoop.security.token.Token;
+ import org.apache.hadoop.security.token.TokenIdentifier;
  import org.apache.hadoop.util.Progressable;
 -import org.mortbay.util.ajax.JSON;
 +import org.eclipse.jetty.util.ajax.JSON;
  
- import com.google.common.annotations.VisibleForTesting;
  import com.google.common.base.Charsets;
+ import com.google.common.collect.Lists;
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java
 index 3471848..b4e0202 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java
@@ -2099,61 +1797,25 @@ index 3471848..b4e0202 100644
  
  /**
   * Test {@link JournalNodeMXBean}
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestDatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestDatanodeManager.java
-index 7503493..0561fcf 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestDatanodeManager.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestDatanodeManager.java
-@@ -30,7 +30,7 @@
- import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java
+index db8f92e..79d9003 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java
+@@ -28,7 +28,7 @@
+ import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.hdfs.MiniDFSCluster;
  import org.junit.Test;
- import org.mockito.Mockito;
--import org.mortbay.log.Log;
-+import org.eclipse.jetty.util.log.Log;
- 
- import static org.junit.Assert.*;
- 
-@@ -57,7 +57,7 @@ public void testNumVersionsReportedCorrect() throws IOException {
-     Random rng = new Random();
-     int seed = rng.nextInt();
-     rng = new Random(seed);
--    Log.info("Using seed " + seed + " for testing");
-+    Log.getRootLogger().info("Using seed " + seed + " for testing");
+-import org.mortbay.util.ajax.JSON;
++import org.eclipse.jetty.util.ajax.JSON;
  
-     //A map of the Storage IDs to the DN registration it was registered with
-     HashMap <String, DatanodeRegistration> sIdToDnReg =
-@@ -76,7 +76,7 @@ public void testNumVersionsReportedCorrect() throws IOException {
-           it.next();
-         }
-         DatanodeRegistration toRemove = it.next().getValue();
--        Log.info("Removing node " + toRemove.getStorageID() + " ip " +
-+        Log.getRootLogger().info("Removing node " + toRemove.getStorageID() + " ip " +
-         toRemove.getXferAddr() + " version : " + toRemove.getSoftwareVersion());
- 
-         //Remove that random node
-@@ -110,7 +110,7 @@ public void testNumVersionsReportedCorrect() throws IOException {
-         Mockito.when(dr.getSoftwareVersion()).thenReturn(
-           "version" + rng.nextInt(5));
- 
--        Log.info("Registering node storageID: " + dr.getStorageID() +
-+        Log.getRootLogger().info("Registering node storageID: " + dr.getStorageID() +
-           ", version: " + dr.getSoftwareVersion() + ", IP address: "
-           + dr.getXferAddr());
- 
-@@ -136,7 +136,7 @@ public void testNumVersionsReportedCorrect() throws IOException {
-         }
-       }
-       for(Entry <String, Integer> entry: mapToCheck.entrySet()) {
--        Log.info("Still in map: " + entry.getKey() + " has "
-+        Log.getRootLogger().info("Still in map: " + entry.getKey() + " has "
-           + entry.getValue());
-       }
-       assertEquals("The map of version counts returned by DatanodeManager was"
+ /**
+  * Class for testing {@link NameNodeMXBean} implementation
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
-index 2d9a70c..d6e6fab 100644
+index d459d30..6327a83 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
-@@ -34,7 +34,7 @@
- import org.apache.hadoop.hdfs.MiniDFSCluster;
+@@ -37,7 +37,7 @@
+ import org.apache.hadoop.io.nativeio.NativeIO.POSIX.NoMlockCacheManipulator;
  import org.apache.hadoop.util.VersionInfo;
  import org.junit.Test;
 -import org.mortbay.util.ajax.JSON;
@@ -2162,7 +1824,7 @@ index 2d9a70c..d6e6fab 100644
  /**
   * Class for testing {@link NameNodeMXBean} implementation
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
-index 544f44f..b33f97e 100644
+index 0f22e9a..bff549a 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
 @@ -36,7 +36,7 @@
@@ -2175,7 +1837,7 @@ index 544f44f..b33f97e 100644
  public class TestStartupProgressServlet {
  
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
-index daaa6d8..683f414 100644
+index f24b801..28d05b4 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
 @@ -46,7 +46,7 @@
@@ -2188,18 +1850,18 @@ index daaa6d8..683f414 100644
  /*
   * Mock input stream class that always outputs the current position of the stream. 
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
-index 5460047..3b691db 100644
+index 2bce30f..eaf836d 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
 +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
-@@ -28,7 +28,7 @@
+@@ -38,7 +38,7 @@
  import org.apache.hadoop.util.Time;
  import org.junit.Assert;
  import org.junit.Test;
 -import org.mortbay.util.ajax.JSON;
 +import org.eclipse.jetty.util.ajax.JSON;
  
- public class TestJsonUtil {
-   static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
+ import com.google.common.collect.Lists;
+ 
 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
 index 7029f42..c7023c9 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
@@ -2214,7 +1876,7 @@ index 7029f42..c7023c9 100644
  /**
   * This class drives the creation of a mini-cluster on the local machine. By
 diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
-index 981e6ff..394810c 100644
+index 981e6ff..7864756 100644
 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
 +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
 @@ -30,7 +30,7 @@
@@ -2226,84 +1888,6 @@ index 981e6ff..394810c 100644
  
  /**
   * <p>This class handles job end notification. Submitters of jobs can choose to
-@@ -102,10 +102,10 @@ public void setConf(Configuration conf) {
-         int port = Integer.parseInt(portConf);
-         proxyToUse = new Proxy(proxyType,
-           new InetSocketAddress(hostname, port));
--        Log.info("Job end notification using proxy type \"" + proxyType + 
-+        Log.getRootLogger().info("Job end notification using proxy type \"" + proxyType + 
-         "\" hostname \"" + hostname + "\" and port \"" + port + "\"");
-       } catch(NumberFormatException nfe) {
--        Log.warn("Job end notification couldn't parse configured proxy's port "
-+        Log.getRootLogger().warn("Job end notification couldn't parse configured proxy's port "
-           + portConf + ". Not going to use a proxy");
-       }
-     }
-@@ -122,23 +122,23 @@ public Configuration getConf() {
-   protected boolean notifyURLOnce() {
-     boolean success = false;
-     try {
--      Log.info("Job end notification trying " + urlToNotify);
-+      Log.getRootLogger().info("Job end notification trying " + urlToNotify);
-       HttpURLConnection conn =
-         (HttpURLConnection) urlToNotify.openConnection(proxyToUse);
-       conn.setConnectTimeout(timeout);
-       conn.setReadTimeout(timeout);
-       conn.setAllowUserInteraction(false);
-       if(conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
--        Log.warn("Job end notification to " + urlToNotify +" failed with code: "
-+        Log.getRootLogger().warn("Job end notification to " + urlToNotify +" failed with code: "
-         + conn.getResponseCode() + " and message \"" + conn.getResponseMessage()
-         +"\"");
-       }
-       else {
-         success = true;
--        Log.info("Job end notification to " + urlToNotify + " succeeded");
-+        Log.getRootLogger().info("Job end notification to " + urlToNotify + " succeeded");
-       }
-     } catch(IOException ioe) {
--      Log.warn("Job end notification to " + urlToNotify + " failed", ioe);
-+      Log.getRootLogger().warn("Job end notification to " + urlToNotify + " failed", ioe);
-     }
-     return success;
-   }
-@@ -153,7 +153,7 @@ public void notify(JobReport jobReport)
-     throws InterruptedException {
-     // Do we need job-end notification?
-     if (userUrl == null) {
--      Log.info("Job end notification URL not set, skipping.");
-+      Log.getRootLogger().info("Job end notification URL not set, skipping.");
-       return;
-     }
- 
-@@ -169,23 +169,23 @@ public void notify(JobReport jobReport)
-     try {
-       urlToNotify = new URL(userUrl);
-     } catch (MalformedURLException mue) {
--      Log.warn("Job end notification couldn't parse " + userUrl, mue);
-+      Log.getRootLogger().warn("Job end notification couldn't parse " + userUrl, mue);
-       return;
-     }
- 
-     // Send notification
-     boolean success = false;
-     while (numTries-- > 0 && !success) {
--      Log.info("Job end notification attempts left " + numTries);
-+      Log.getRootLogger().info("Job end notification attempts left " + numTries);
-       success = notifyURLOnce();
-       if (!success) {
-         Thread.sleep(waitInterval);
-       }
-     }
-     if (!success) {
--      Log.warn("Job end notification failed to notify : " + urlToNotify);
-+      Log.getRootLogger().warn("Job end notification failed to notify : " + urlToNotify);
-     } else {
--      Log.info("Job end notification succeeded for " + jobReport.getJobId());
-+      Log.getRootLogger().info("Job end notification succeeded for " + jobReport.getJobId());
-     }
-   }
- }
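
The hunks dropped above had rewritten Jetty's static Log.info()/Log.warn() calls as Log.getRootLogger().info(...) and so on, the workaround used when the static helpers are missing from org.eclipse.jetty.util.log.Log. For reference, the logger-instance idiom looks roughly like this sketch (class name and messages are illustrative, not taken from the patch):

    import org.eclipse.jetty.util.log.Log;
    import org.eclipse.jetty.util.log.Logger;

    public class JettyLogSketch {
      public static void main(String[] args) {
        // Fetch a Logger once instead of relying on static Log helpers,
        // which newer Jetty releases no longer provide.
        Logger log = Log.getRootLogger();
        log.info("job end notification attempt {} of {}", 1, 3);
        log.warn("could not parse configured proxy port: {}", "not-a-number");
      }
    }
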
 diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
 index 8891ec7..1dd369a 100644
 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
@@ -2354,10 +1938,10 @@ index 8891ec7..1dd369a 100644
  
    @Test
 diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
-index d2ea74e..32d6b0e 100644
+index d2ea74e..d986fdc 100644
 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
 +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
-@@ -18,9 +18,10 @@
+@@ -18,9 +18,9 @@
  
  package org.apache.hadoop.mapred;
  
@@ -2365,13 +1949,12 @@ index d2ea74e..32d6b0e 100644
 -import org.mortbay.jetty.servlet.Context;
 -import org.mortbay.jetty.servlet.ServletHolder;
 +import org.eclipse.jetty.server.Server;
-+import org.eclipse.jetty.server.ServerConnector;
 +import org.eclipse.jetty.servlet.ServletContextHandler;
 +import org.eclipse.jetty.servlet.ServletHolder;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.io.Text;
-@@ -69,7 +70,7 @@ private void startHttpServer() throws Exception {
+@@ -69,7 +69,7 @@ private void startHttpServer() throws Exception {
      }
      webServer = new Server(0);
  
@@ -2380,17 +1963,8 @@ index d2ea74e..32d6b0e 100644
  
      // create servlet handler
      context.addServlet(new ServletHolder(new NotificationServlet()),
-@@ -77,7 +78,7 @@ private void startHttpServer() throws Exception {
- 
-     // Start webServer
-     webServer.start();
--    port = webServer.getConnectors()[0].getLocalPort();
-+    port = ((ServerConnector) webServer.getConnectors()[0]).getLocalPort();
- 
-   }
- 
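
The dropped cast above is a Jetty 9 idiom: there, connectors are ServerConnector instances and the base Connector type no longer exposes getLocalPort(). Against the 8.1.14 Jetty pinned later in this patch, the uncast call still compiles; a minimal sketch (illustrative class name):

    import org.eclipse.jetty.server.Server;

    public class EphemeralPortSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server(0);  // bind an ephemeral port
        server.start();
        // Jetty 8's Connector interface still has getLocalPort(),
        // so no ServerConnector cast is needed.
        int port = server.getConnectors()[0].getLocalPort();
        System.out.println("listening on " + port);
        server.stop();
      }
    }
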
 diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
-index 35b5e30..91964bd 100644
+index 2e8ba5e..3cc73b5 100644
 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
 +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
 @@ -45,7 +45,7 @@
@@ -2402,8 +1976,34 @@ index 35b5e30..91964bd 100644
  
  /**
   * This class drives the creation of a mini-cluster on the local machine. By
+diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
+index c803a7f..393d385 100644
+--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
++++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
+@@ -111,7 +111,7 @@
+ import org.jboss.netty.handler.ssl.SslHandler;
+ import org.jboss.netty.handler.stream.ChunkedWriteHandler;
+ import org.jboss.netty.util.CharsetUtil;
+-import org.mortbay.jetty.HttpHeaders;
++import org.eclipse.jetty.http.HttpHeaders;
+ 
+ import com.google.common.base.Charsets;
+ import com.google.common.util.concurrent.ThreadFactoryBuilder;
+diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
+index 420c428..3a3257e 100644
+--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
++++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
+@@ -78,7 +78,7 @@
+ import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+ import org.junit.Assert;
+ import org.junit.Test;
+-import org.mortbay.jetty.HttpHeaders;
++import org.eclipse.jetty.http.HttpHeaders;
+ 
+ public class TestShuffleHandler {
+   static final long MiB = 1024 * 1024; 
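
Both shuffle hunks are pure import swaps: org.eclipse.jetty.http.HttpHeaders keeps the same String header-name constants the old org.mortbay.jetty class exposed, so ShuffleHandler can keep applying them to its Netty responses unchanged. A minimal sketch, assuming Jetty 8 on the classpath:

    import org.eclipse.jetty.http.HttpHeaders;

    public class HeaderConstantsSketch {
      public static void main(String[] args) {
        // Plain header-name strings, usable with any HTTP stack.
        System.out.println(HttpHeaders.CONTENT_LENGTH);  // "Content-Length"
        System.out.println(HttpHeaders.CONTENT_TYPE);    // "Content-Type"
      }
    }
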
 diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
-index 18fbb74..c71db66 100644
+index ebf5014..e2fab7e 100644
 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
 +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
 @@ -43,8 +43,8 @@
@@ -2436,23 +2036,11 @@ index 18fbb74..c71db66 100644
          </exclusion>
        </exclusions>
      </dependency>
-@@ -158,6 +150,11 @@
-       <artifactId>commons-lang</artifactId>
-       <scope>provided</scope>
-     </dependency>
-+    <dependency>
-+      <groupId>commons-collections</groupId>
-+      <artifactId>commons-collections</artifactId>
-+      <scope>provided</scope>
-+    </dependency>
-   </dependencies>
-  
-   <build>
 diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml
-index 11640d7..6290e72 100644
+index c41aac5..90a741f 100644
 --- a/hadoop-mapreduce-project/pom.xml
 +++ b/hadoop-mapreduce-project/pom.xml
-@@ -53,8 +53,8 @@
+@@ -52,8 +52,8 @@
        <artifactId>avro</artifactId>
        <exclusions>
          <exclusion>
@@ -2463,7 +2051,7 @@ index 11640d7..6290e72 100644
          </exclusion>
          <exclusion>
            <groupId>org.apache.ant</groupId>
-@@ -88,16 +88,8 @@
+@@ -87,16 +87,8 @@
            <artifactId>commons-el</artifactId>
          </exclusion>
          <exclusion>
@@ -2483,10 +2071,10 @@ index 11640d7..6290e72 100644
        </exclusions>
      </dependency>
 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
-index b7a7456..d7cc299 100644
+index 272dadc..48e17a9 100644
 --- a/hadoop-project/pom.xml
 +++ b/hadoop-project/pom.xml
-@@ -63,7 +63,7 @@
+@@ -59,7 +59,7 @@
      <avro.version>1.7.4</avro.version>
  
      <!-- jersey version -->
@@ -2495,18 +2083,7 @@ index b7a7456..d7cc299 100644
  
      <!-- ProtocolBuffer version, used to verify the protoc version and -->
      <!-- define the protobuf JAR version                               -->
-@@ -311,8 +311,8 @@
-       </dependency>
-       <dependency>
-         <groupId>org.apache.commons</groupId>
--        <artifactId>commons-math</artifactId>
--        <version>2.1</version>
-+        <artifactId>commons-math3</artifactId>
-+        <version>3.1.1</version>
-       </dependency>
-       <dependency>
-         <groupId>org.apache.commons</groupId>
-@@ -342,23 +342,17 @@
+@@ -360,29 +360,17 @@
        <dependency>
          <groupId>javax.servlet</groupId>
          <artifactId>servlet-api</artifactId>
@@ -2525,31 +2102,24 @@ index b7a7456..d7cc299 100644
 -        </exclusions>
 +        <groupId>org.eclipse.jetty</groupId>
 +        <artifactId>jetty-server</artifactId>
-+        <version>9.0.4.v20130625</version>
++        <version>8.1.14.v20131031</version>
        </dependency>
        <dependency>
 -        <groupId>org.mortbay.jetty</groupId>
 +        <groupId>org.eclipse.jetty</groupId>
          <artifactId>jetty-util</artifactId>
 -        <version>6.1.26</version>
-+        <version>9.0.4.v20130625</version>
-       </dependency>
- 
-       <dependency>
-@@ -368,6 +362,12 @@
+-      </dependency>
+-
+-      <dependency>
+-        <groupId>org.glassfish</groupId>
+-        <artifactId>javax.servlet</artifactId>
+-        <version>3.1</version>
++        <version>8.1.14.v20131031</version>
        </dependency>
  
        <dependency>
-+        <groupId>org.glassfish.web</groupId>
-+        <artifactId>javax.servlet.jsp</artifactId>
-+        <version>2.2.6</version>
-+      </dependency>
-+
-+      <dependency>
-         <groupId>org.codehaus.plexus</groupId>
-         <artifactId>plexus-utils</artifactId>
-         <version>2.0.5</version>
-@@ -404,12 +404,23 @@
+@@ -422,6 +410,11 @@
          <artifactId>jersey-server</artifactId>
          <version>${jersey.version}</version>
        </dependency>
@@ -2561,28 +2131,7 @@ index b7a7456..d7cc299 100644
  
        <dependency>
          <groupId>com.google.inject</groupId>
-         <artifactId>guice</artifactId>
-         <version>3.0</version>
-       </dependency>
-+      
-+      <dependency>
-+        <groupId>cglib</groupId>
-+        <artifactId>cglib</artifactId>
-+        <version>2.2</version>
-+      </dependency>
- 
-       <dependency>
-         <groupId>com.google.inject.extensions</groupId>
-@@ -438,7 +449,7 @@
-       <dependency>
-         <groupId>io.netty</groupId>
-         <artifactId>netty</artifactId>
--        <version>3.6.2.Final</version>
-+        <version>3.6.6.Final</version>
-       </dependency>
- 
-       <dependency>
-@@ -448,14 +459,9 @@
+@@ -472,34 +465,22 @@
        </dependency>
  
        <dependency>
@@ -2594,59 +2143,42 @@ index b7a7456..d7cc299 100644
 -        <groupId>tomcat</groupId>
 -        <artifactId>jasper-compiler</artifactId>
 -        <version>5.5.23</version>
-+        <groupId>org.apache.tomcat</groupId>
-+        <artifactId>tomcat-jasper</artifactId>
-+        <version>7.0.37</version>
-         <exclusions>
-           <exclusion>
-             <groupId>javax.servlet</groupId>
-@@ -465,17 +471,16 @@
-             <groupId>ant</groupId>
-             <artifactId>ant</artifactId>
-           </exclusion>
-+          <exclusion>
-+             <groupId>org.eclipse.jdt.core.compiler</groupId>
-+             <artifactId>ecj</artifactId>
-+          </exclusion>
-         </exclusions>
+-        <exclusions>
+-          <exclusion>
+-            <groupId>javax.servlet</groupId>
+-            <artifactId>jsp-api</artifactId>
+-          </exclusion>
+-          <exclusion>
+-            <groupId>ant</groupId>
+-            <artifactId>ant</artifactId>
+-          </exclusion>
+-        </exclusions>
++        <groupId>org.eclipse.jetty</groupId>
++        <artifactId>test-jetty-servlet</artifactId>
++        <version>8.1.14.v20131031</version>
        </dependency>
++      <!-- mix of glassfish 2.2 jspc & Tomcat's servlet 3.0 api
++           note order of these
++      -->
        <dependency>
 -        <groupId>tomcat</groupId>
 -        <artifactId>jasper-runtime</artifactId>
 -        <version>5.5.23</version>
--      </dependency>
--      <dependency>
++        <groupId>org.apache.tomcat</groupId>
++        <artifactId>tomcat-servlet-api</artifactId>
++        <version>7.0.37</version>
+       </dependency>
+       <dependency>
 -        <groupId>javax.servlet.jsp</groupId>
 -        <artifactId>jsp-api</artifactId>
 -        <version>2.1</version>
-+        <groupId>org.eclipse.jetty</groupId>
-+        <artifactId>test-jetty-servlet</artifactId>
-+        <version>9.0.4.v20130625</version>
++        <groupId>org.glassfish.web</groupId>
++        <artifactId>javax.servlet.jsp</artifactId>
++        <version>2.2.5</version>
        </dependency>
        <dependency>
          <groupId>commons-el</groupId>
-@@ -483,6 +488,11 @@
-         <version>1.0</version>
-       </dependency>
-       <dependency>
-+        <groupId>org.eclipse.jetty</groupId>
-+        <artifactId>jetty-jspc-maven-plugin</artifactId>
-+        <version>9.0.4.v20130625</version>
-+      </dependency>
-+      <dependency>
-         <groupId>commons-logging</groupId>
-         <artifactId>commons-logging</artifactId>
-         <version>1.1.1</version>
-@@ -536,7 +546,7 @@
-       <dependency>
-         <groupId>net.java.dev.jets3t</groupId>
-         <artifactId>jets3t</artifactId>
--        <version>0.6.1</version>
-+        <version>0.9.0</version>
-       </dependency>
-       <dependency>
-         <groupId>org.apache.mina</groupId>
-@@ -704,7 +714,7 @@
+@@ -728,7 +709,7 @@
        <dependency>
          <groupId>org.apache.bookkeeper</groupId>
          <artifactId>bookkeeper-server</artifactId>
@@ -2655,65 +2187,152 @@ index b7a7456..d7cc299 100644
          <scope>compile</scope>
        </dependency>
        <dependency>
-@@ -712,6 +722,11 @@
-         <artifactId>hsqldb</artifactId>
-         <version>2.0.0</version>
-       </dependency>
-+      <dependency>
-+        <groupId>com.google.code.findbugs</groupId>
-+        <artifactId>jsr305</artifactId>
-+        <version>1.3.9</version>
-+      </dependency>
-     </dependencies>
-   </dependencyManagement>
- 
-@@ -797,9 +812,9 @@
-           <version>${avro.version}</version>
-         </plugin>
-         <plugin>
--          <groupId>org.codehaus.mojo.jspc</groupId>
--          <artifactId>jspc-maven-plugin</artifactId>
--          <version>2.0-alpha-3</version>
-+          <groupId>org.eclipse.jetty</groupId>
-+          <artifactId>jetty-jspc-maven-plugin</artifactId>
-+          <version>9.0.4.v20130625</version>
-         </plugin>
-         <plugin>
-           <groupId>org.apache.maven.plugins</groupId>
-diff --git a/hadoop-tools/hadoop-extras/pom.xml b/hadoop-tools/hadoop-extras/pom.xml
-index cd87e04..0810b34 100644
---- a/hadoop-tools/hadoop-extras/pom.xml
-+++ b/hadoop-tools/hadoop-extras/pom.xml
-@@ -89,6 +89,11 @@
-       <type>test-jar</type>
-       <scope>test</scope>
+diff --git a/hadoop-tools/hadoop-sls/pom.xml b/hadoop-tools/hadoop-sls/pom.xml
+index b6d5dbe..7729ec3 100644
+--- a/hadoop-tools/hadoop-sls/pom.xml
++++ b/hadoop-tools/hadoop-sls/pom.xml
+@@ -55,18 +55,12 @@
+       <scope>compile</scope>
      </dependency>
-+    <dependency>
-+       <groupId>cglib</groupId>
-+       <artifactId>cglib</artifactId>
-+       <scope>test</scope>
-+    </dependency>
-   </dependencies>
- 
-   <build>
-diff --git a/hadoop-tools/hadoop-streaming/pom.xml b/hadoop-tools/hadoop-streaming/pom.xml
-index 4a39cfa..fede1b6 100644
---- a/hadoop-tools/hadoop-streaming/pom.xml
-+++ b/hadoop-tools/hadoop-streaming/pom.xml
-@@ -95,6 +95,11 @@
-       <type>test-jar</type>
-       <scope>test</scope>
+     <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
+-      <artifactId>jetty</artifactId>
++      <groupId>org.eclipse.jetty</groupId>
++      <artifactId>jetty-server</artifactId>
+       <scope>provided</scope>
+-      <exclusions>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>servlet-api</artifactId>
+-        </exclusion>
+-      </exclusions>
+     </dependency>
+     <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
++      <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-util</artifactId>
+       <scope>provided</scope>
+     </dependency>
+diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
+index 123ccea..e961e58 100644
+--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
++++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
+@@ -32,10 +32,11 @@
+ import org.apache.commons.io.FileUtils;
+ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
+         .SchedulerEventType;
+-import org.mortbay.jetty.Handler;
+-import org.mortbay.jetty.Server;
+-import org.mortbay.jetty.handler.AbstractHandler;
+-import org.mortbay.jetty.Request;
++import org.eclipse.jetty.server.Handler;
++import org.eclipse.jetty.server.Server;
++import org.eclipse.jetty.server.Request;
++import org.eclipse.jetty.server.handler.AbstractHandler;
++import org.eclipse.jetty.server.handler.ResourceHandler;
+ 
+ import org.apache.hadoop.yarn.sls.SLSRunner;
+ import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics;
+@@ -45,7 +46,6 @@
+ import com.codahale.metrics.Gauge;
+ import com.codahale.metrics.Histogram;
+ import com.codahale.metrics.MetricRegistry;
+-import org.mortbay.jetty.handler.ResourceHandler;
+ 
+ public class SLSWebApp extends HttpServlet {
+   private static final long serialVersionUID = 1905162041950251407L;
+@@ -108,8 +108,9 @@ public void start() throws Exception {
+ 
+     Handler handler = new AbstractHandler() {
+       @Override
+-      public void handle(String target, HttpServletRequest request,
+-                         HttpServletResponse response, int dispatch) {
++      public void handle(String target, Request baseRequest,
++                         HttpServletRequest request,
++                         HttpServletResponse response) {
+         try{
+           // timeunit
+           int timeunit = 1000;   // second, divide millionsecond / 1000
+@@ -131,7 +132,7 @@ public void handle(String target, HttpServletRequest request,
+             // js/css request
+             if (target.startsWith("/js") || target.startsWith("/css")) {
+               response.setCharacterEncoding("utf-8");
+-              staticHandler.handle(target, request, response, dispatch);
++              staticHandler.handle(target, baseRequest, request, response);
+             } else
+               // json request
+               if (target.equals("/simulateMetrics")) {
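
The SLSWebApp hunks track the one real API break in the mortbay-to-eclipse move: Jetty 6's handle(target, request, response, dispatch) becomes handle(target, baseRequest, request, response), with an explicit base Request replacing the int dispatch flag. A minimal standalone sketch under the same Jetty 8 assumption (handler name and response body are illustrative):

    import java.io.IOException;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.eclipse.jetty.server.Request;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.AbstractHandler;

    public class HandlerSketch extends AbstractHandler {
      @Override
      public void handle(String target, Request baseRequest,
                         HttpServletRequest request,
                         HttpServletResponse response) throws IOException {
        // The base request replaces the old dispatch flag; mark it
        // handled so no other handler runs afterwards.
        response.setContentType("text/plain;charset=utf-8");
        response.getWriter().println("ok");
        baseRequest.setHandled(true);
      }

      public static void main(String[] args) throws Exception {
        Server server = new Server(0);  // any free port
        server.setHandler(new HandlerSketch());
        server.start();
        server.join();
      }
    }

Nested handlers such as the static ResourceHandler in the patch chain the same way: pass baseRequest through, as the staticHandler.handle(...) line above does.
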
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml
+index 9e8bf05..a5a280a 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml
+@@ -64,10 +64,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
      </dependency>
-+    <dependency>
-+      <groupId>cglib</groupId>
-+      <artifactId>cglib</artifactId>
-+      <scope>test</scope>
-+    </dependency>
-   </dependencies>
  
-   <build>
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
+index 2942e71..7b9bb5d 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
+@@ -51,10 +51,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+ 
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml
+index 7179262..1fffa89 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml
+@@ -63,10 +63,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+     <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
+index ac7d470..c60a9d1 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
+@@ -48,10 +48,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+ 
+@@ -76,7 +72,7 @@
+       <artifactId>log4j</artifactId>
+     </dependency>
+     <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
++      <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-util</artifactId>
+     </dependency>
+     <dependency>
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java
-index 58ef215..fc6bee1 100644
+index 08e71c1..461c43c 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java
 @@ -83,7 +83,7 @@
@@ -2725,59 +2344,114 @@ index 58ef215..fc6bee1 100644
  
  public class TestAMRMClient {
    static Configuration conf = null;
-@@ -436,7 +436,7 @@ public void testAMRMClientMatchStorage() throws YarnException, IOException {
-       int iterationsLeft = 3;
-       while (allocatedContainerCount < 2
-           && iterationsLeft-- > 0) {
--        Log.info(" == alloc " + allocatedContainerCount + " it left " + iterationsLeft);
-+        Log.getRootLogger().info(" == alloc " + allocatedContainerCount + " it left " + iterationsLeft);
-         AllocateResponse allocResponse = amClient.allocate(0.1f);
-         assertTrue(amClient.ask.size() == 0);
-         assertTrue(amClient.release.size() == 0);
-@@ -604,7 +604,7 @@ private int getAllocatedContainersNumber(
-       throws YarnException, IOException {
-     int allocatedContainerCount = 0;
-     while (iterationsLeft-- > 0) {
--      Log.info(" == alloc " + allocatedContainerCount + " it left " + iterationsLeft);
-+      Log.getRootLogger().info(" == alloc " + allocatedContainerCount + " it left " + iterationsLeft);
-       AllocateResponse allocResponse = amClient.allocate(0.1f);
-       assertTrue(amClient.ask.size() == 0);
-       assertTrue(amClient.release.size() == 0);
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+index 1efb54c..1b3463b 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+@@ -62,7 +62,7 @@
+ import org.apache.hadoop.yarn.util.Records;
+ import org.junit.Before;
+ import org.junit.Test;
+-import org.mortbay.log.Log;
++import org.eclipse.jetty.util.log.Log;
+ 
+ import org.apache.commons.cli.Options;
+ 
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+index ece280f..0407643 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+@@ -51,10 +51,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+ 
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
-index d2ce2f2..2d4b9f1 100644
+index f8c6f55..71df06b 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
-@@ -219,7 +219,7 @@ public void setup() {
-         HttpServer server =
-             new HttpServer(name, bindAddress, port, findPort, conf,
-                 new AdminACLsManager(conf).getAdminAcl(), null,
--                pathList.toArray(new String[0])) {
-+                pathList.toArray(new String[0]), null) {
- 
-               {
-                 if (UserGroupInformation.isSecurityEnabled()) {
+@@ -266,7 +266,8 @@ public void setup() {
+           server.setAttribute(entry.getKey(), entry.getValue());
+         }
+         HttpServer2.defineFilter(server.getWebAppContext(), "guice",
+-          GuiceFilter.class.getName(), null, new String[] { "/*" });
++          GuiceFilter.class.getName(), new HashMap<String,String>(0),
++           new String[] { "/*" });
+ 
+         webapp.setConf(conf);
+         webapp.setHttpServer(server);
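
The WebApps hunk swaps a null filter-parameter map for an explicit empty one, which keeps HttpServer2.defineFilter from handing null init parameters to the servlet container. The defensive idiom in isolation (illustrative names, not Hadoop's actual API):

    import java.util.HashMap;
    import java.util.Map;

    public class FilterParamsSketch {
      // Stand-in for a container call that iterates the map unconditionally.
      static void defineFilter(String name, Map<String, String> params) {
        for (Map.Entry<String, String> e : params.entrySet()) {  // NPEs on null
          System.out.println(name + ": " + e.getKey() + "=" + e.getValue());
        }
      }

      public static void main(String[] args) {
        defineFilter("guice", new HashMap<String, String>(0));  // empty, never null
      }
    }
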
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
+index a933f41..c240daa 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
+@@ -58,10 +58,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+ 
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
+index 93495d4..d88077b 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
+@@ -51,10 +51,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+     <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+index 6d87785..17bce8e 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+@@ -53,10 +53,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+ 
+@@ -99,7 +95,7 @@
+       <artifactId>jersey-client</artifactId>
+     </dependency>
+     <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
++      <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-util</artifactId>
+     </dependency>
+     <dependency>
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
-index 452a823..612eba1 100644
+index 7d2948e..81e51c3 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
-@@ -57,7 +57,7 @@
+@@ -43,6 +43,7 @@
  import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
  import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.PRE;
  import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
--import org.mortbay.log.Log;
 +import org.eclipse.jetty.util.log.Log;
  
  import com.google.inject.Inject;
  
-@@ -328,7 +328,7 @@ private void printLogs(Block html, ContainerId containerId,
-         try {
-           logDir = new URI(logDir).getPath();
-         } catch (URISyntaxException e) {
--          Log.warn(e.getMessage());
-+          Log.getRootLogger().warn(e.getMessage());
-         }
-         String appIdStr = ConverterUtils.toString(containerId
-             .getApplicationAttemptId().getApplicationId());
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
 index bfb0e87..f9fac8e 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -2792,7 +2466,7 @@ index bfb0e87..f9fac8e 100644
  
  
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
-index d60d583..034bc4e 100644
+index 72c1f6f..d272614 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
 @@ -176,7 +176,7 @@ public void testNodeAppsNone() throws JSONException, Exception {
@@ -2823,7 +2497,7 @@ index d60d583..034bc4e 100644
  
    @Test
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
-index 95016c2..310e18b 100644
+index 29c9253..56ca16e 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
 @@ -183,7 +183,7 @@ public void testNodeContainersNone() throws JSONException, Exception {
@@ -2835,8 +2509,32 @@ index 95016c2..310e18b 100644
    }
  
    private HashMap<String, String> addAppContainers(Application app) 
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
+index 1ff750b..c628621 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
+@@ -55,10 +55,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
+         </exclusion>
+-        <exclusion>
+-          <groupId>org.mortbay.jetty</groupId>
+-          <artifactId>jsp-2.1-jetty</artifactId>
+-        </exclusion>
+       </exclusions>
+     </dependency>
+ 
+@@ -161,7 +157,7 @@
+       <artifactId>jersey-client</artifactId>
+     </dependency>
+     <dependency>
+-      <groupId>org.mortbay.jetty</groupId>
++      <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-util</artifactId>
+     </dependency>
+     <dependency>
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java
-index 5f7002c..945cd29 100644
+index ef4a0d4..f96879e 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java
 @@ -33,7 +33,7 @@
@@ -2848,8 +2546,21 @@ index 5f7002c..945cd29 100644
  
  /**
   * JMX bean listing statuses of all node managers.
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java
+index 1dcac06..6ecc80d 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java
+@@ -43,7 +43,7 @@
+ import org.apache.hadoop.yarn.server.utils.BuilderUtils;
+ import org.apache.hadoop.yarn.util.Records;
+ import org.apache.hadoop.yarn.util.YarnVersionInfo;
+-import org.mortbay.log.Log;
++import org.eclipse.jetty.util.log.Log;
+ 
+ public class MockNM {
+ 
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java
-index 118a621..9dc50d0 100644
+index 45b3803..2b79c2c 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java
 @@ -376,7 +376,7 @@ public void testAppsQueryStateNone() throws JSONException, Exception {
@@ -2880,7 +2591,7 @@ index 118a621..9dc50d0 100644
    }
  
 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java
-index 1304134..fa160c6 100644
+index da2e2b1..77cdfa9 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java
 @@ -204,7 +204,7 @@ public void testNodesQueryStateNone() throws JSONException, Exception {
@@ -2901,83 +2612,70 @@ index 1304134..fa160c6 100644
    }
  
    public void testNodesHelper(String path, String media) throws JSONException,
-diff --git a/hadoop-yarn-project/hadoop-yarn/pom.xml b/hadoop-yarn-project/hadoop-yarn/pom.xml
-index bc36c85..01ee8a1 100644
---- a/hadoop-yarn-project/hadoop-yarn/pom.xml
-+++ b/hadoop-yarn-project/hadoop-yarn/pom.xml
-@@ -44,16 +44,8 @@
-           <artifactId>commons-el</artifactId>
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
+index 3e86381..37f376d 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
+@@ -50,10 +50,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
          </exclusion>
-         <exclusion>
--          <groupId>tomcat</groupId>
--          <artifactId>jasper-runtime</artifactId>
--        </exclusion>
--        <exclusion>
--          <groupId>tomcat</groupId>
--          <artifactId>jasper-compiler</artifactId>
--        </exclusion>
 -        <exclusion>
 -          <groupId>org.mortbay.jetty</groupId>
 -          <artifactId>jsp-2.1-jetty</artifactId>
-+          <groupId>org.apache.tomcat</groupId>
-+          <artifactId>tomcat-jasper</artifactId>
-         </exclusion>
+-        </exclusion>
        </exclusions>
      </dependency>
-@@ -112,6 +104,11 @@
-       <artifactId>guice</artifactId>
-     </dependency>
-     <dependency>
-+        <groupId>cglib</groupId>
-+        <artifactId>cglib</artifactId>
-+        <scope>provided</scope>
-+    </dependency>
-+    <dependency>
-       <groupId>com.sun.jersey.jersey-test-framework</groupId>
-       <artifactId>jersey-test-framework-core</artifactId>
-       <scope>test</scope>
-diff --git a/hadoop-yarn-project/pom.xml b/hadoop-yarn-project/pom.xml
-index 8f117b2..ee95376 100644
---- a/hadoop-yarn-project/pom.xml
-+++ b/hadoop-yarn-project/pom.xml
-@@ -51,8 +51,8 @@
-       <artifactId>avro</artifactId>
-       <exclusions>
-         <exclusion>
--          <groupId>org.mortbay.jetty</groupId>
--          <artifactId>jetty</artifactId>
-+          <groupId>org.eclipse.jetty</groupId>
-+          <artifactId>jetty-server</artifactId>
+     <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+index b7c6fd3..8e557ff 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+@@ -56,10 +56,6 @@
+           <groupId>tomcat</groupId>
+           <artifactId>jasper-compiler</artifactId>
          </exclusion>
-         <exclusion>
-           <groupId>org.apache.ant</groupId>
-@@ -86,16 +86,8 @@
-           <artifactId>commons-el</artifactId>
-         </exclusion>
-         <exclusion>
--          <groupId>tomcat</groupId>
--          <artifactId>jasper-runtime</artifactId>
--        </exclusion>
--        <exclusion>
--          <groupId>tomcat</groupId>
--          <artifactId>jasper-compiler</artifactId>
--        </exclusion>
 -        <exclusion>
 -          <groupId>org.mortbay.jetty</groupId>
 -          <artifactId>jsp-2.1-jetty</artifactId>
-+          <groupId>org.apache.tomcat</groupId>
-+          <artifactId>tomcat-jasper</artifactId>
-         </exclusion>
+-        </exclusion>
        </exclusions>
      </dependency>
-@@ -133,6 +125,10 @@
-       <artifactId>guice</artifactId>
+     <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+@@ -109,8 +105,8 @@
+       <artifactId>commons-logging</artifactId>
      </dependency>
      <dependency>
-+        <groupId>cglib</groupId>
-+        <artifactId>cglib</artifactId>
-+    </dependency>
-+    <dependency>
-       <groupId>com.sun.jersey</groupId>
-       <artifactId>jersey-server</artifactId>
+-      <groupId>org.mortbay.jetty</groupId>
+-      <artifactId>jetty</artifactId>
++      <groupId>org.eclipse.jetty</groupId>
++      <artifactId>jetty-server</artifactId>
      </dependency>
+ 
+     <dependency>
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
+index 1be0115..420a41c 100644
+--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
+@@ -59,9 +59,9 @@
+ import org.junit.AfterClass;
+ import org.junit.BeforeClass;
+ import org.junit.Test;
+-import org.mortbay.jetty.Server;
+-import org.mortbay.jetty.servlet.Context;
+-import org.mortbay.jetty.servlet.ServletHolder;
++import org.eclipse.jetty.server.Server;
++import org.eclipse.jetty.servlet.ServletContextHandler;
++import org.eclipse.jetty.servlet.ServletHolder;
+ 
+ /**
+  * Test the WebAppProxyServlet and WebAppProxy. For back end use simple web
+@@ -81,7 +81,7 @@
+   @BeforeClass
+   public static void start() throws Exception {
+     server = new Server(0);
+-    Context context = new Context();
++    ServletContextHandler context = new ServletContextHandler();
+     context.setContextPath("/foo");
+     server.setHandler(context);
+     context.addServlet(new ServletHolder(TestServlet.class), "/bar/");
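
For the proxy test, org.mortbay.jetty.servlet.Context maps onto org.eclipse.jetty.servlet.ServletContextHandler with the rest of the setup unchanged. A self-contained sketch of the same embedded-server pattern (EchoServlet stands in for the test's own TestServlet):

    import java.io.IOException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.servlet.ServletHolder;

    public class EmbeddedJettySketch {
      // Illustrative servlet; the real test registers its own TestServlet.
      public static class EchoServlet extends HttpServlet {
        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
          resp.getWriter().println("hello");
        }
      }

      public static void main(String[] args) throws Exception {
        Server server = new Server(0);                    // ephemeral port
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/foo");                   // mirrors the test
        server.setHandler(context);
        context.addServlet(new ServletHolder(EchoServlet.class), "/bar/");
        server.start();
        server.join();
      }
    }
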
diff --git a/hadoop-guava-15.0.patch b/hadoop-guava-15.0.patch
new file mode 100644
index 0000000..aef84cf
--- /dev/null
+++ b/hadoop-guava-15.0.patch
@@ -0,0 +1,144 @@
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
+index c117ee8..9434429 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
+@@ -68,7 +68,6 @@
+ import com.google.common.base.Stopwatch;
+ import com.google.common.collect.ImmutableList;
+ import com.google.common.collect.Range;
+-import com.google.common.collect.Ranges;
+ import com.google.protobuf.TextFormat;
+ 
+ /**
+@@ -853,7 +852,7 @@ public synchronized void acceptRecovery(RequestInfo reqInfo,
+   private Range<Long> txnRange(SegmentStateProto seg) {
+     Preconditions.checkArgument(seg.hasEndTxId(),
+         "invalid segment: %s", seg);
+-    return Ranges.closed(seg.getStartTxId(), seg.getEndTxId());
++    return Range.closed(seg.getStartTxId(), seg.getEndTxId());
+   }
+ 
+   /**
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+index 5075da9..0d868d4 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+@@ -62,7 +62,7 @@
+ 
+ import com.google.common.collect.Lists;
+ import com.google.common.collect.Maps;
+-import com.google.common.io.LimitInputStream;
++import com.google.common.io.ByteStreams;
+ import com.google.protobuf.CodedOutputStream;
+ 
+ /**
+@@ -215,7 +215,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) {
+ 
+       for (FileSummary.Section s : sections) {
+         channel.position(s.getOffset());
+-        InputStream in = new BufferedInputStream(new LimitInputStream(fin,
++        InputStream in = new BufferedInputStream(ByteStreams.limit(fin,
+             s.getLength()));
+ 
+         in = FSImageUtil.wrapInputStreamForCompression(conf,
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
+index c8033dd..b312bfe 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
+@@ -33,7 +33,7 @@
+ import org.apache.hadoop.io.IOUtils;
+ 
+ import com.google.common.base.Preconditions;
+-import com.google.common.io.LimitInputStream;
++import com.google.common.io.ByteStreams;
+ 
+ /**
+  * This is the tool for analyzing file sizes in the namespace image. In order to
+@@ -106,7 +106,7 @@ void visit(RandomAccessFile file) throws IOException {
+ 
+         in.getChannel().position(s.getOffset());
+         InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
+-            summary.getCodec(), new BufferedInputStream(new LimitInputStream(
++            summary.getCodec(), new BufferedInputStream(ByteStreams.limit(
+                 in, s.getLength())));
+         run(is);
+         output();
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
+index d80fcf1..e025f82 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
+@@ -50,7 +50,7 @@
+ 
+ import com.google.common.collect.Lists;
+ import com.google.common.collect.Maps;
+-import com.google.common.io.LimitInputStream;
++import com.google.common.io.ByteStreams;
+ 
+ /**
+  * LsrPBImage displays the blocks of the namespace in a format very similar
+@@ -110,7 +110,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) {
+       for (FileSummary.Section s : sections) {
+         fin.getChannel().position(s.getOffset());
+         InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
+-            summary.getCodec(), new BufferedInputStream(new LimitInputStream(
++            summary.getCodec(), new BufferedInputStream(ByteStreams.limit(
+                 fin, s.getLength())));
+ 
+         switch (SectionName.fromString(s.getName())) {
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+index 99617b8..c613591 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+@@ -52,7 +52,7 @@
+ import org.apache.hadoop.io.IOUtils;
+ 
+ import com.google.common.collect.Lists;
+-import com.google.common.io.LimitInputStream;
++import com.google.common.io.ByteStreams;
+ 
+ /**
+  * PBImageXmlWriter walks over an fsimage structure and writes out
+@@ -100,7 +100,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) {
+       for (FileSummary.Section s : sections) {
+         fin.getChannel().position(s.getOffset());
+         InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
+-            summary.getCodec(), new BufferedInputStream(new LimitInputStream(
++            summary.getCodec(), new BufferedInputStream(ByteStreams.limit(
+                 fin, s.getLength())));
+ 
+         switch (SectionName.fromString(s.getName())) {
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
+index 132218c..09d42e1 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
+@@ -47,7 +47,7 @@
+ import org.junit.Before;
+ import org.junit.Test;
+ 
+-import com.google.common.io.NullOutputStream;
++import com.google.common.io.ByteStreams;
+ 
+ public class TestDataTransferKeepalive {
+   final Configuration conf = new HdfsConfiguration();
+@@ -224,7 +224,7 @@ public void testManyClosedSocketsInCache() throws Exception {
+         stms[i] = fs.open(TEST_FILE);
+       }
+       for (InputStream stm : stms) {
+-        IOUtils.copyBytes(stm, new NullOutputStream(), 1024);
++        IOUtils.copyBytes(stm, ByteStreams.nullOutputStream(), 1024);
+       }
+     } finally {
+       IOUtils.cleanup(null, stms);
+diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
+index 272dadc..dc5ae3a 100644
+--- a/hadoop-project/pom.xml
++++ b/hadoop-project/pom.xml
+@@ -310,7 +310,7 @@
+       <dependency>
+         <groupId>com.google.guava</groupId>
+         <artifactId>guava</artifactId>
+-        <version>11.0.2</version>
++        <version>15.0</version>
+       </dependency>
+       <dependency>
+         <groupId>commons-cli</groupId>
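
The new Guava patch boils down to three mechanical replacements for APIs dropped in Guava 15: Ranges.closed moves to Range.closed, LimitInputStream becomes ByteStreams.limit, and NullOutputStream becomes ByteStreams.nullOutputStream(). All three in one sketch (assumes Guava 15.0 on the classpath; the data is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import com.google.common.collect.Range;
    import com.google.common.io.ByteStreams;

    public class Guava15Sketch {
      public static void main(String[] args) throws Exception {
        // Ranges.closed(a, b) is gone; the factory lives on Range itself.
        Range<Long> txns = Range.closed(1L, 100L);
        System.out.println(txns.contains(50L));           // true

        // LimitInputStream is gone; wrap the stream with ByteStreams.limit.
        InputStream in = new ByteArrayInputStream(new byte[16]);
        InputStream limited = ByteStreams.limit(in, 8);   // reads at most 8 bytes

        // NullOutputStream is gone; use the nullOutputStream() factory.
        ByteStreams.copy(limited, ByteStreams.nullOutputStream());
      }
    }
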
diff --git a/hadoop-hdfs-site.xml b/hadoop-hdfs-site.xml
index dbd8ee2..2e543b0 100644
--- a/hadoop-hdfs-site.xml
+++ b/hadoop-hdfs-site.xml
@@ -34,19 +34,19 @@
   </property>
   <property>
      <name>hadoop.tmp.dir</name>
-     <value>/var/cache/hadoop-hdfs/${user.name}</value>
+     <value>/var/lib/hadoop-hdfs/${user.name}</value>
   </property>
   <property>
      <name>dfs.namenode.name.dir</name>
-     <value>file:///var/cache/hadoop-hdfs/${user.name}/dfs/namenode</value>
+     <value>file:///var/lib/hadoop-hdfs/${user.name}/dfs/namenode</value>
   </property>
   <property>
      <name>dfs.namenode.checkpoint.dir</name>
-     <value>file:///var/cache/hadoop-hdfs/${user.name}/dfs/secondarynamenode</value>
+     <value>file:///var/lib/hadoop-hdfs/${user.name}/dfs/secondarynamenode</value>
   </property>
   <property>
      <name>dfs.datanode.data.dir</name>
-     <value>file:///var/cache/hadoop-hdfs/${user.name}/dfs/datanode</value>
+     <value>file:///var/lib/hadoop-hdfs/${user.name}/dfs/datanode</value>
   </property>
   <property>
       <name>dfs.http.address</name>
diff --git a/hadoop-maven.patch b/hadoop-maven.patch
index c644489..076a918 100644
--- a/hadoop-maven.patch
+++ b/hadoop-maven.patch
@@ -1,8 +1,8 @@
 diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
-index 89691c6..c9d1ee9 100644
+index 7eae610..c05a202 100644
 --- a/hadoop-common-project/hadoop-common/pom.xml
 +++ b/hadoop-common-project/hadoop-common/pom.xml
-@@ -353,16 +353,6 @@
+@@ -364,16 +364,6 @@
          </executions>
        </plugin>
        <plugin>
@@ -19,7 +19,7 @@ index 89691c6..c9d1ee9 100644
          <groupId>org.apache.avro</groupId>
          <artifactId>avro-maven-plugin</artifactId>
          <executions>
-@@ -464,6 +454,10 @@
+@@ -480,6 +470,10 @@
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-surefire-plugin</artifactId>
          <configuration>
@@ -31,10 +31,10 @@ index 89691c6..c9d1ee9 100644
              <property>
                <name>listener</name>
 diff --git a/pom.xml b/pom.xml
-index 6eb6efc..99c6992 100644
+index 9c824ff..d354566 100644
 --- a/pom.xml
 +++ b/pom.xml
-@@ -367,6 +367,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs
+@@ -387,6 +387,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs
            <plugin>
              <groupId>org.apache.maven.plugins</groupId>
              <artifactId>maven-javadoc-plugin</artifactId>
diff --git a/hadoop-netty-3.6.6-Final.patch b/hadoop-netty-3.6.6-Final.patch
new file mode 100644
index 0000000..4a093b0
--- /dev/null
+++ b/hadoop-netty-3.6.6-Final.patch
@@ -0,0 +1,31 @@
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
+index 2a532e4..9e0849b 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
+@@ -38,12 +38,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ 
+   <dependencyManagement>
+     <dependencies>
+-      <!-- This is a really old version of netty, that gets privatized
+-           via shading and hence it is not managed via a parent pom -->
+       <dependency>
+         <groupId>org.jboss.netty</groupId>
+         <artifactId>netty</artifactId>
+-        <version>3.2.4.Final</version>
++        <version>3.6.6.Final</version>
+       </dependency>
+     </dependencies>
+   </dependencyManagement>
+diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
+index 272dadc..41a5a9b 100644
+--- a/hadoop-project/pom.xml
++++ b/hadoop-project/pom.xml
+@@ -462,7 +462,7 @@
+       <dependency>
+         <groupId>io.netty</groupId>
+         <artifactId>netty</artifactId>
+-        <version>3.6.2.Final</version>
++        <version>3.6.6.Final</version>
+       </dependency>
+ 
+       <dependency>
diff --git a/hadoop-no-download-tomcat.patch b/hadoop-no-download-tomcat.patch
index f881fff..c04a90b 100644
--- a/hadoop-no-download-tomcat.patch
+++ b/hadoop-no-download-tomcat.patch
@@ -1,8 +1,8 @@
 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
-index 72f3b7b..1d99c76 100644
+index 4134f82..cdac7e6 100644
 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
 +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
-@@ -523,50 +523,6 @@
+@@ -523,53 +523,6 @@
              <artifactId>maven-antrun-plugin</artifactId>
              <executions>
                <execution>
@@ -37,6 +37,9 @@ index 72f3b7b..1d99c76 100644
 -                    <delete file="${httpfs.tomcat.dist.dir}/conf/server.xml"/>
 -                    <copy file="${basedir}/src/main/tomcat/server.xml"
 -                          toDir="${httpfs.tomcat.dist.dir}/conf"/>
+-                    <delete file="${httpfs.tomcat.dist.dir}/conf/ssl-server.xml"/>
+-                    <copy file="${basedir}/src/main/tomcat/ssl-server.xml"
+-                          toDir="${httpfs.tomcat.dist.dir}/conf"/>
 -                    <delete file="${httpfs.tomcat.dist.dir}/conf/logging.properties"/>
 -                    <copy file="${basedir}/src/main/tomcat/logging.properties"
 -                          toDir="${httpfs.tomcat.dist.dir}/conf"/>
diff --git a/hadoop-tools.jar.patch b/hadoop-tools.jar.patch
new file mode 100644
index 0000000..e5db1fb
--- /dev/null
+++ b/hadoop-tools.jar.patch
@@ -0,0 +1,32 @@
+diff --git a/hadoop-common-project/hadoop-annotations/pom.xml b/hadoop-common-project/hadoop-annotations/pom.xml
+index 1e109af..c53229a 100644
+--- a/hadoop-common-project/hadoop-annotations/pom.xml
++++ b/hadoop-common-project/hadoop-annotations/pom.xml
+@@ -48,11 +48,8 @@
+       </activation>
+       <dependencies>
+         <dependency>
+-          <groupId>jdk.tools</groupId>
+-          <artifactId>jdk.tools</artifactId>
+-          <version>1.6</version>
+-          <scope>system</scope>
+-          <systemPath>${java.home}/../lib/tools.jar</systemPath>
++          <groupId>com.sun</groupId>
++          <artifactId>tools</artifactId>
+         </dependency>
+       </dependencies>
+     </profile>
+@@ -63,11 +60,8 @@
+       </activation>
+       <dependencies>
+         <dependency>
+-          <groupId>jdk.tools</groupId>
+-          <artifactId>jdk.tools</artifactId>
+-          <version>1.7</version>
+-          <scope>system</scope>
+-          <systemPath>${java.home}/../lib/tools.jar</systemPath>
++          <groupId>com.sun</groupId>
++          <artifactId>tools</artifactId>
+         </dependency>
+       </dependencies>
+     </profile>
diff --git a/hadoop.spec b/hadoop.spec
index a11535d..8b69815 100644
--- a/hadoop.spec
+++ b/hadoop.spec
@@ -7,65 +7,62 @@
 %global package_libhdfs 0
 %endif
 
-%global commit 2e01e27e5ba4ece19650484f646fac42596250ce
+%global commit 9d04888c2ca6ffc0d11e5fd894e3fa567398214a
 %global shortcommit %(c=%{commit}; echo ${c:0:7})
 
 %global hadoop_version %{version}
-%global hdfs_services hadoop-zkfc.service hadoop-datanode.service hadoop-secondarynamenode.service hadoop-namenode.service
+%global hdfs_services hadoop-zkfc.service hadoop-datanode.service hadoop-secondarynamenode.service hadoop-namenode.service hadoop-journalnode.service
 %global mapreduce_services hadoop-historyserver.service
-%global yarn_services hadoop-proxyserver.service hadoop-resourcemanager.service hadoop-nodemanager.service
+%global yarn_services hadoop-proxyserver.service hadoop-resourcemanager.service hadoop-nodemanager.service hadoop-timelineserver.service
 
 # Filter out undesired provides and requires
 %global __requires_exclude_from ^%{_libdir}/%{name}/libhadoop.so$
 %global __provides_exclude_from ^%{_libdir}/%{name}/.*$
 
+%bcond_with javadoc
+
 Name:   hadoop
-Version: 2.2.0
-Release: 7%{?dist}
+Version: 2.4.0
+Release: 1%{?dist}
 Summary: A software platform for processing vast amounts of data
 # The BSD license file is missing
 # https://issues.apache.org/jira/browse/HADOOP-9849
 License: ASL 2.0 and BSD
-Group:  Development/Libraries
 URL: http://hadoop.apache.org
 Source0: https://github.com/apache/hadoop-common/archive/%{commit}/%{name}-%{version}-%{shortcommit}.tar.gz
-Source1: hadoop-layout.sh
-Source2: hadoop-hdfs.service.template
-Source3: hadoop-mapreduce.service.template
-Source4: hadoop-yarn.service.template
-Source6: hadoop.logrotate
-Source8: hadoop-core-site.xml
-Source9: hadoop-hdfs-site.xml
-Source10: hadoop-mapred-site.xml
-Source11: hadoop-yarn-site.xml
-Source12: hadoop-httpfs.sysconfig
+Source1: %{name}-layout.sh
+Source2: %{name}-hdfs.service.template
+Source3: %{name}-mapreduce.service.template
+Source4: %{name}-yarn.service.template
+Source6: %{name}.logrotate
+Source8: %{name}-core-site.xml
+Source9: %{name}-hdfs-site.xml
+Source10: %{name}-mapred-site.xml
+Source11: %{name}-yarn-site.xml
+Source12: %{name}-httpfs.sysconfig
 Source13: hdfs-create-dirs
-Source14: hadoop-tomcat-users.xml
+Source14: %{name}-tomcat-users.xml
 # This patch includes the following upstream tickets:
-# https://issues.apache.org/jira/browse/HADOOP-9594
-# https://issues.apache.org/jira/browse/MAPREDUCE-5431
-# https://issues.apache.org/jira/browse/HADOOP-9611
 # https://issues.apache.org/jira/browse/HADOOP-9613
-# https://issues.apache.org/jira/browse/HADOOP-9623
 # https://issues.apache.org/jira/browse/HDFS-5411
-# https://issues.apache.org/jira/browse/HADOOP-10067
-# https://issues.apache.org/jira/browse/HDFS-5075
 # https://issues.apache.org/jira/browse/HADOOP-10068
 # https://issues.apache.org/jira/browse/HADOOP-10075
 # https://issues.apache.org/jira/browse/HADOOP-10076
-Patch0: hadoop-fedora-integration.patch
+Patch0: %{name}-fedora-integration.patch
 # Fedora packaging guidelines for JNI library loading
-Patch2: hadoop-jni-library-loading.patch
+Patch2: %{name}-jni-library-loading.patch
 # Clean up warnings with maven 3.0.5
-Patch3: hadoop-maven.patch
+Patch3: %{name}-maven.patch
 # Don't download tomcat
-Patch4: hadoop-no-download-tomcat.patch
+Patch4: %{name}-no-download-tomcat.patch
 # Use dlopen to find libjvm.so
-Patch5: hadoop-dlopen-libjvm.patch
-# Update to jetty 9.1.0
-Patch6: hadoop-jetty-9.1.0.patch
-# Update to Guava 0.15
-Patch7: hadoop-guava-0.15.patch
+Patch5: %{name}-dlopen-libjvm.patch
+# Update to Guava 15.0
+Patch7: %{name}-guava-15.0.patch
+# Update to Netty 3.6.6-Final
+Patch8: %{name}-netty-3.6.6-Final.patch
+# Remove the problematic system-scoped dependency on tools.jar
+Patch9: %{name}-tools.jar.patch
 # The native bits don't compile on ARM
 ExcludeArch: %{arm}
 
@@ -113,6 +110,7 @@ BuildRequires: grizzly
 BuildRequires: guava
 BuildRequires: guice-servlet
 BuildRequires: hamcrest
+BuildRequires: hawtjni
 BuildRequires: hsqldb
 BuildRequires: httpcomponents-client
 BuildRequires: httpcomponents-core
@@ -128,17 +126,20 @@ BuildRequires: jersey
 BuildRequires: jersey-contribs
 BuildRequires: jets3t
 BuildRequires: jettison
-# May need to break down into specific jetty rpms
-BuildRequires: jetty
-BuildRequires: jetty-jspc-maven-plugin
-BuildRequires: jetty-util-ajax
+BuildRequires: jetty8
 BuildRequires: jsch
 BuildRequires: json_simple
+BuildRequires: jspc
 BuildRequires: jsr-305
 BuildRequires: jsr-311
 BuildRequires: junit
 BuildRequires: jzlib
+BuildRequires: leveldbjni
+%if 0%{?fedora} < 21
 BuildRequires: log4j
+%else
+BuildRequires: log4j12
+%endif
 BuildRequires: make
 BuildRequires: maven
 BuildRequires: maven-antrun-plugin
@@ -160,14 +161,19 @@ BuildRequires: maven-remote-resources-plugin
 BuildRequires: maven-shade-plugin
 BuildRequires: maven-surefire-plugin
 BuildRequires: maven-war-plugin
+BuildRequires: metrics
 BuildRequires: mockito
 BuildRequires: native-maven-plugin
-%if 0%{fedora} < 21
+%if 0%{?fedora} < 21
 BuildRequires: netty
 %else
 BuildRequires: netty3
 %endif
+%if 0%{?fedora} > 20
+BuildRequires: objectweb-asm3
+%else
 BuildRequires: objectweb-asm
+%endif
 BuildRequires: objenesis >= 1.2-16
 BuildRequires: openssl-devel
 BuildRequires: paranamer
@@ -181,12 +187,15 @@ BuildRequires: snappy-java
 BuildRequires: systemd
 BuildRequires: tomcat
 BuildRequires: tomcat-el-2.2-api
+%if 0%{?fedora} > 20
+BuildRequires: tomcat-log4j
+%endif
 BuildRequires: tomcat-servlet-3.0-api
 BuildRequires: txw2
 BuildRequires: which
 BuildRequires: xmlenc
 BuildRequires: znerd-oss-parent
-%if 0%{fedora} < 21
+%if 0%{?fedora} < 21
 BuildRequires: zookeeper-java
 %else
 BuildRequires: zookeeper-java > 3.4.5-15
@@ -194,7 +203,11 @@ BuildRequires: zookeeper-java > 3.4.5-15
 
 # For tests
 BuildRequires: jersey-test-framework
+%if 0%{?fedora} > 20
+BuildRequires: maven-surefire-provider-junit
+%else
 BuildRequires: maven-surefire-provider-junit4
+%endif
 
 %description
 Apache Hadoop is a framework that allows for the distributed processing of
@@ -204,7 +217,6 @@ offering local computation and storage.
 
 %package client
 Summary: Libraries for Apache Hadoop clients
-Group: Applications/System
 BuildArch: noarch
 Requires: %{name}-common = %{version}-%{release}
 Requires: %{name}-hdfs = %{version}-%{release}
@@ -221,7 +233,6 @@ This package provides libraries for Apache Hadoop clients.
 
 %package common
 Summary: Common files needed by Apache Hadoop daemons
-Group: Applications/System
 BuildArch: noarch
 Requires: /usr/sbin/useradd
 
@@ -242,14 +253,15 @@ Requires: java-base64
 Requires: java-xmlbuilder
 Requires: javamail
 Requires: jettison
-Requires: jetty-http
-Requires: jetty-io
-Requires: jetty-security
-Requires: jetty-xml
+Requires: jetty8
 Requires: jsr-311
 Requires: mockito
 Requires: nc6
+%if 0%{?fedora} > 20
+Requires: objectweb-asm3
+%else
 Requires: objectweb-asm
+%endif
 Requires: objenesis
 Requires: paranamer
 Requires: relaxngDatatype
@@ -269,7 +281,6 @@ Hadoop modules.
 
 %package common-native
 Summary: The native Apache Hadoop library file
-Group: Applications/System
 Requires: %{name}-common = %{version}-%{release}
 
 %description common-native
@@ -283,7 +294,6 @@ This package contains the native-hadoop library
 %if %{package_libhdfs}
 %package devel
 Summary: Headers for Apache Hadoop
-Group: Development/System
 Requires: libhdfs%{?_isa} = %{version}-%{release}
 
 %description devel
@@ -292,7 +302,6 @@ Header files for Apache Hadoop's hdfs library and other utilities
 
 %package hdfs
 Summary: The Apache Hadoop Distributed File System
-Group: Applications/System
 BuildArch: noarch
 Requires: apache-commons-daemon-jsvc
 Requires: %{name}-common = %{version}-%{release}
@@ -313,7 +322,6 @@ used by Apache Hadoop applications.
 %if %{package_libhdfs}
 %package hdfs-fuse
 Summary: Allows mounting of Apache Hadoop HDFS
-Group: Development/Libraries
 Requires: fuse
 Requires: libhdfs%{?_isa} = %{version}-%{release}
 Requires: %{name}-common = %{version}-%{release}
@@ -333,7 +341,6 @@ file system through fuse.
 
 %package httpfs
 Summary: Provides web access to HDFS
-Group: Applications/System
 BuildArch: noarch
 Requires: apache-commons-dbcp
 Requires: ecj >= 1:4.2.1-6
@@ -357,19 +364,19 @@ the complete FileSystem/FileContext interface in HDFS.
 # Creation of javadocs takes too many resources and results in failures on
 # most architectures, so only generate on intel 64-bit
 %ifarch x86_64
+%if %{with javadoc}
 %package javadoc
 Summary: Javadoc for Apache Hadoop
-Group: Documentation
 BuildArch: noarch
 
 %description javadoc
 This package contains the API documentation for %{name}.
 %endif
+%endif
 
 %if %{package_libhdfs}
 %package -n libhdfs
 Summary: The Apache Hadoop Filesystem Library
-Group: Development/Libraries
 Requires: %{name}-hdfs = %{version}-%{release}
 Requires: lzo
 
@@ -384,9 +391,9 @@ This package provides the Apache Hadoop Filesystem Library.
 
 %package mapreduce
 Summary: Apache Hadoop MapReduce (MRv2)
-Group: Applications/System
 BuildArch: noarch
 Requires(pre): %{name}-common = %{version}-%{release}
+Requires(pre): %{name}-yarn = %{version}-%{release}
 Requires(post): systemd
 Requires(preun): systemd
 Requires(postun): systemd
@@ -401,9 +408,7 @@ This package provides Apache Hadoop MapReduce (MRv2).
 
 %package mapreduce-examples
 Summary: Apache Hadoop MapReduce (MRv2) examples
-Group: Applications/System
 BuildArch: noarch
-Requires: %{name}-mapreduce = %{version}-%{release}
 Requires: hsqldb
 
 %description mapreduce-examples
@@ -411,7 +416,6 @@ This package contains mapreduce examples.
 
 %package maven-plugin
 Summary: Apache Hadoop maven plugin
-Group: Development/Libraries
 BuildArch: noarch
 Requires: maven
 
@@ -436,12 +440,13 @@ This package contains test related resources for Apache Hadoop.
 
 %package yarn
 Summary: Apache Hadoop YARN
-Group: Applications/System
 BuildArch: noarch
 Requires(pre): %{name}-common = %{version}-%{release}
 Requires: aopalliance
 Requires: atinject
 Requires: hamcrest
+Requires: hawtjni
+Requires: leveldbjni
 Requires(post): systemd
 Requires(preun): systemd
 Requires(postun): systemd
@@ -456,7 +461,6 @@ This package contains Apache Hadoop YARN.
 
 %package yarn-security
 Summary: The ability to run Apache Hadoop YARN in secure mode
-Group: Applications/System
 Requires: %{name}-yarn = %{version}-%{release}
 
 %description yarn-security
@@ -476,46 +480,36 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
 %if %{package_libhdfs}
 %patch5 -p1
 %endif
-%if 0%{fedora} >= 21
-%patch6 -p1
+%if 0%{?fedora} >= 21
 %patch7 -p1
+%patch8 -p1
 %endif
+%patch9 -p1
 
-%if 0%{fedora} < 21
+%if 0%{?fedora} < 21
 # The hadoop test suite needs classes from the zookeeper test suite.
 # We need to modify the deps to use the pom for the zookeeper-test jar
-%pom_xpath_set "pom:project/pom:dependencies/pom:dependency[pom:artifactId='zookeeper' and pom:scope='test']/pom:artifactId" zookeeper-test hadoop-common-project/hadoop-common
-%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:groupId='org.apache.zookeeper' and pom:scope='test']/pom:type" hadoop-common-project/hadoop-common
-%pom_xpath_inject "pom:project/pom:dependencies/pom:dependency[pom:groupId='org.apache.zookeeper' and pom:scope='test']" "
+fix_zookeeper_test()
+{
+%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='zookeeper' and pom:scope='test']/pom:type" $1 
+%pom_xpath_inject "pom:project/pom:dependencies/pom:dependency[pom:artifactId='zookeeper' and pom:scope='test']" " 
       <exclusions>
         <exclusion>
           <groupId>org.jboss.netty</groupId>
           <artifactId>netty</artifactId>
         </exclusion>
       </exclusions>
-  " hadoop-common-project/hadoop-common
+  " $1
+%pom_xpath_set "pom:project/pom:dependencies/pom:dependency[pom:artifactId='zookeeper' and pom:scope='test']/pom:artifactId" zookeeper-test $1 
+}
 
-%pom_xpath_set "pom:project/pom:dependencies/pom:dependency[pom:artifactId='zookeeper' and pom:scope='test']/pom:artifactId" zookeeper-test hadoop-hdfs-project/hadoop-hdfs
-%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:groupId='org.apache.zookeeper' and pom:scope='test']/pom:type" hadoop-hdfs-project/hadoop-hdfs
-%pom_xpath_inject "pom:project/pom:dependencies/pom:dependency[pom:groupId='org.apache.zookeeper' and pom:scope='test']" "
-      <exclusions>
-        <exclusion>
-          <groupId>org.jboss.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-      </exclusions>
-" hadoop-hdfs-project/hadoop-hdfs
+fix_zookeeper_test hadoop-common-project/hadoop-common
+fix_zookeeper_test hadoop-hdfs-project/hadoop-hdfs
+fix_zookeeper_test hadoop-hdfs-project/hadoop-hdfs-nfs
+fix_zookeeper_test hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager
 
-%pom_xpath_set "pom:project/pom:dependencies/pom:dependency[pom:artifactId='zookeeper' and pom:scope='test']/pom:artifactId" zookeeper-test hadoop-hdfs-project/hadoop-hdfs-nfs
-%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:groupId='org.apache.zookeeper' and pom:scope='test']/pom:type" hadoop-hdfs-project/hadoop-hdfs-nfs
-%pom_xpath_inject "pom:project/pom:dependencies/pom:dependency[pom:groupId='org.apache.zookeeper' and pom:scope='test']" "
-      <exclusions>
-        <exclusion>
-          <groupId>org.jboss.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-      </exclusions>
-" hadoop-hdfs-project/hadoop-hdfs-nfs
+sed -i "s/:pom//" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
+fix_zookeeper_test hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client
 %endif
 
 # Remove the maven-site-plugin.  It's not needed
@@ -546,6 +540,27 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
 %pom_remove_plugin :maven-checkstyle-plugin hadoop-project
 %pom_remove_plugin :maven-checkstyle-plugin hadoop-tools/hadoop-distcp
 
+# Disable the hadoop-minikdc module due to missing deps
+%pom_disable_module hadoop-minikdc hadoop-common-project
+%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-auth
+%pom_remove_dep :hadoop-minikdc hadoop-project
+%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests
+rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
+rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
+rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java
+rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
+rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
+
+# Add dependencies for timeline service
+%pom_add_dep org.iq80.leveldb:leveldb hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice
+%pom_add_dep org.fusesource.hawtjni:hawtjni-runtime hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice
+
+# Fix scope on hadoop-common:test-jar
+%pom_xpath_set "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-common' and pom:type='test-jar']/pom:scope" test hadoop-tools/hadoop-openstack
+
+# Modify the asm version to use the compat library (version 3.3.6)
+#%%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='asm']/pom:version" 3.3.6 hadoop-project
+
 # War files we don't want
 %mvn_package :%{name}-auth-examples __noinstall
 %mvn_package :%{name}-hdfs-httpfs __noinstall
@@ -568,7 +583,9 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
 %mvn_package :%{name}-distcp::{}: %{name}-mapreduce
 %mvn_package :%{name}-extras::{}: %{name}-mapreduce
 %mvn_package :%{name}-gridmix::{}: %{name}-mapreduce
+%mvn_package :%{name}-openstack::{}: %{name}-mapreduce
 %mvn_package :%{name}-rumen::{}: %{name}-mapreduce
+%mvn_package :%{name}-sls::{}: %{name}-mapreduce
 %mvn_package :%{name}-streaming::{}: %{name}-mapreduce
 %mvn_package :%{name}-pipes::{}: %{name}-mapreduce
 %mvn_package :%{name}-tools*::{}: %{name}-mapreduce
@@ -577,16 +594,15 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
 %mvn_package :%{name}-yarn*::{}: %{name}-yarn
 
 # Jar files that need to be overridden due to installation location
-%if 0%{fedora} < 21
-# Workaround for bz1023116
-#%%mvn_file :%{name}-common::{}: %{_jnidir}/%{name}-common %{_datadir}/%{name}/common/%{name}-common
-%mvn_file :%{name}-common::{}: %{_jnidir}/%{name}/%{name}-common
-%endif
 %mvn_file :%{name}-common::tests: %{name}/%{name}-common
 
 %build
 %ifnarch x86_64
 opts="-j"
+%else
+%if %{without javadoc}
+opts="-j"
+%endif
 %endif
 %mvn_build $opts -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipTest -DskipIT
 
@@ -629,6 +645,25 @@ EOL
   %add_maven_depmap -f %{name}-tests $pom %{name}/$1.jar
 }
 
+# Create symlinks for the hadoop jars found in the current directory
+# $1 - the directory in which to create the symlinks
+link_hadoop_jars()
+{
+  for f in `ls hadoop-* | grep -v tests | grep -v examples`
+  do
+    n=`echo $f | sed "s/-%{version}//"`
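+    # Keep an existing symlink as-is; otherwise remove the stale
+    # versioned copy (and any old unversioned file) before re-linking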
+    if [ -L $1/$n ]
+    then
+      continue
+    elif [ -e $1/$f ]
+    then
+      rm -f $1/$f $1/$n
+    fi
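+    # Point the unversioned name at the jar installed under %{_jnidir}
+    # or %{_javadir}/%{name}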
+    p=`find %{buildroot}/%{_jnidir} %{buildroot}/%{_javadir}/%{name} -name $n | sed "s#%{buildroot}##"`
+    %{__ln_s} $p $1/$n
+  done
+}
+
 %mvn_install
 
 install -d -m 0755 %{buildroot}/%{_libdir}/%{name}
@@ -638,17 +673,17 @@ install -d -m 0755 %{buildroot}/%{_jnidir}/%{name}
 install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/client/lib
 install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/common/lib
 install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
 install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps
 install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
 install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/yarn/lib
-install -d -m 0755 %{buildroot}/%{_sharedstatedir}/%{name}-hdfs/webapps/hdfs
 install -d -m 0755 %{buildroot}/%{_sharedstatedir}/tomcats/httpfs
 install -d -m 0755 %{buildroot}/%{_sysconfdir}/%{name}/tomcat/Catalina/localhost
 install -d -m 0755 %{buildroot}/%{_sysconfdir}/logrotate.d
 install -d -m 0755 %{buildroot}/%{_sysconfdir}/sysconfig
 install -d -m 0755 %{buildroot}/%{_tmpfilesdir}
+install -d -m 0755 %{buildroot}/%{_var}/lib/%{name}-hdfs
 install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-yarn
-install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-hdfs
 install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-httpfs/temp
 install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-httpfs/work
 install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-mapreduce
@@ -709,35 +744,37 @@ install -pm 644 %{name}-project-dist/target/%{name}-project-dist-%{hadoop_versio
 install -pm 644 hadoop-project-dist/pom.xml %{buildroot}/%{_mavenpomdir}/JPP.%{name}-%{name}-project-dist.pom
 %add_maven_depmap JPP.%{name}-%{name}-project-dist.pom %{name}/%{name}-project-dist.jar
 
-# Workaround for bz1023116
-%{__ln_s} %{_jnidir}/%{name}/%{name}-common.jar %{buildroot}/%{_datadir}/%{name}/common
-
 # client jar dependencies
 copy_dep_jars %{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client/lib %{buildroot}/%{_datadir}/%{name}/client/lib
 %{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/client/lib
-%{__ln_s} %{_jnidir}/%{name}-common.jar %{buildroot}/%{_datadir}/%{name}/client/lib
-%{__ln_s} %{_javadir}/%{name}/%{name}-client.jar %{buildroot}/%{_datadir}/%{name}/client
-for f in annotations auth hdfs mapreduce-client-app mapreduce-client-common mapreduce-client-core mapreduce-client-jobclient mapreduce-client-shuffle yarn-api yarn-client yarn-common yarn-server-common
-do
-  %{__ln_s} %{_javadir}/%{name}/%{name}-$f.jar %{buildroot}/%{_datadir}/%{name}/client/lib
-done
+pushd  %{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client/lib
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/client/lib
+popd
+pushd  %{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/client
+popd
 
 # common jar dependencies
 copy_dep_jars $basedir/share/%{name}/common/lib %{buildroot}/%{_datadir}/%{name}/common/lib
 %{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/common/lib
-%{__ln_s} %{_javadir}/%{name}/%{name}-nfs.jar %{buildroot}/%{_datadir}/%{name}/common
-echo "%{_datadir}/%{name}/common/%{name}-nfs.jar" >> .mfiles
-for f in annotations auth
+pushd $basedir/share/%{name}/common
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/common
+popd
+for f in `ls %{buildroot}/%{_datadir}/%{name}/common/*.jar`
 do
-  %{__ln_s} %{_javadir}/%{name}/%{name}-$f.jar %{buildroot}/%{_datadir}/%{name}/common/lib
+  echo "$f" | sed "s|%{buildroot}||" >> .mfiles
 done
+pushd  $basedir/share/%{name}/common/lib
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/common/lib
+popd
 
 # hdfs jar dependencies
 copy_dep_jars $basedir/share/%{name}/hdfs/lib %{buildroot}/%{_datadir}/%{name}/hdfs/lib
 %{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/hdfs/lib
-%{__ln_s} %{_javadir}/%{name}/%{name}-hdfs.jar %{buildroot}/%{_datadir}/%{name}/hdfs
-%{__ln_s} %{_javadir}/%{name}/%{name}-hdfs-nfs.jar %{buildroot}/%{_datadir}/%{name}/hdfs
 %{__ln_s} %{_javadir}/%{name}/%{name}-hdfs-bkjournal.jar %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+pushd $basedir/share/%{name}/hdfs
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/hdfs
+popd
 
 # httpfs
 # Create the webapp directory structure
@@ -777,13 +814,7 @@ rm -f %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib
 rm -f %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib/tomcat-*.jar
 %{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib
 pushd %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib
-  for f in `ls hadoop-*`
-  do
-    n=`echo $f | sed "s/-%{version}//"`
-    rm -f $f
-    p=`find %{buildroot}/%{_jnidir} %{buildroot}/%{_javadir}/%{name} -name $n | sed "s#%{buildroot}##"`
-    %{__ln_s} $p $f
-  done
+  link_hadoop_jars .
 popd
 
 pushd %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat
@@ -799,29 +830,20 @@ popd
 copy_dep_jars $basedir/share/%{name}/mapreduce/lib %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
 %{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
 %{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
-for f in app common core jobclient shuffle hs hs-plugins
-do
-  %{__ln_s} %{_javadir}/%{name}/%{name}-mapreduce-client-$f.jar %{buildroot}/%{_datadir}/%{name}/mapreduce
-done
-for f in archives datajoin distcp extras gridmix rumen streaming
-do
-  %{__ln_s} %{_javadir}/%{name}/%{name}-$f.jar %{buildroot}/%{_datadir}/%{name}/mapreduce
-done
+pushd $basedir/share/%{name}/mapreduce
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/mapreduce
+popd
 
 # yarn jar dependencies
 copy_dep_jars $basedir/share/%{name}/yarn/lib %{buildroot}/%{_datadir}/%{name}/yarn/lib
 %{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/yarn/lib
 %{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/yarn/lib
-for f in api client common server-common applications-distributedshell applications-unmanaged-am-launcher server-nodemanager server-resourcemanager server-web-proxy site
-do
-  %{__ln_s} %{_javadir}/%{name}/%{name}-yarn-$f.jar %{buildroot}/%{_datadir}/%{name}/yarn
-done
+pushd $basedir/share/%{name}/yarn
+  link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/yarn
+popd
 
 # Install hdfs webapp bits
-cp -arf $basedir/share/hadoop/hdfs/webapps/* %{buildroot}/%{_sharedstatedir}/%{name}-hdfs/webapps
-pushd %{buildroot}/%{_datadir}/%{name}/hdfs
-  %{__ln_s} %{_sharedstatedir}/%{name}-hdfs/webapps webapps
-popd
+cp -arf $basedir/share/hadoop/hdfs/webapps/* %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
 
 # hadoop layout. Convert to appropriate lib location for 32 and 64 bit archs
 lib=$(echo %{?_libdir} | sed -e 's:/usr/\(.*\):\1:')
@@ -842,6 +864,7 @@ install -d -m 0755 %{buildroot}/%{_unitdir}/
 for service in %{hdfs_services} %{mapreduce_services} %{yarn_services}
 do
   s=`echo $service | cut -d'-' -f 2 | cut -d'.' -f 1`
+  daemon=$s
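+  # The daemon name normally matches the service name; the YARN
+  # timelineserver unit is the exception and starts the 'historyserver'
+  # daemon (see below)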
   if [[ "%{hdfs_services}" == *$service* ]]
   then
     src=%{SOURCE2}
@@ -850,12 +873,16 @@ do
     src=%{SOURCE3}
   elif [[ "%{yarn_services}" == *$service* ]]
   then
+    if [[ "$s" == "timelineserver" ]]
+    then
+      daemon='historyserver'
+    fi
     src=%{SOURCE4}
   else
     echo "Failed to determine type of service for %service"
     exit 1
   fi
-  sed -e "s|DAEMON|$s|g" $src > %{buildroot}/%{_unitdir}/%{name}-$s.service
+  sed -e "s|DAEMON|$daemon|g" $src > %{buildroot}/%{_unitdir}/%{name}-$s.service
 done
 
 cp -f %{SOURCE12} %{buildroot}/%{_sysconfdir}/sysconfig/tomcat at httpfs
@@ -884,12 +911,19 @@ done
 install -m 0644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml %{buildroot}/%{_mavenpomdir}/JPP.%{name}-%{name}-yarn-server-tests-tests.pom
 %add_maven_depmap -f %{name}-tests JPP.%{name}-%{name}-yarn-server-tests-tests.pom %{name}/%{name}-yarn-server-tests-tests.jar
 
+%pretrans -p <lua> hdfs
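+-- On upgrade, the webapps path used to be a symlink into
+-- %{_sharedstatedir}; remove it so rpm can install a real directory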
+path = "%{_datadir}/%{name}/hdfs/webapps"
+st = posix.stat(path)
+if st and st.type == "link" then
+  os.remove(path)
+end
+
 %pre common
 getent group hadoop >/dev/null || groupadd -r hadoop
 
 %pre hdfs
 getent group hdfs >/dev/null || groupadd -r hdfs
-getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop HDFS" --shell /sbin/nologin -M -r -g hdfs -G hadoop --home %{_var}/cache/%{name}-hdfs hdfs
+getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop HDFS" --shell /sbin/nologin -M -r -g hdfs -G hadoop --home %{_var}/lib/%{name}-hdfs hdfs
 
 %pre mapreduce
 getent group mapred >/dev/null || groupadd -r mapred
@@ -913,6 +947,21 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %post hdfs
 %systemd_post %{hdfs_services}
 
+# Change the home directory for the hdfs user
+if [[ `getent passwd hdfs | cut -d: -f 6` != "%{_var}/lib/%{name}-hdfs" ]]
+then
+  /usr/sbin/usermod -d %{_var}/lib/%{name}-hdfs hdfs
+fi
+
+if [ $1 -gt 1 ]
+then
+  if [ -d %{_var}/cache/%{name}-hdfs ] && [ ! -L %{_var}/cache/%{name}-hdfs ]
+  then
+    # Move the existing hdfs data to the new location
+    mv -f %{_var}/cache/%{name}-hdfs/* %{_var}/lib/%{name}-hdfs/
+  fi
+fi
+
 %if %{package_libhdfs}
 %post -n libhdfs -p /sbin/ldconfig
 %endif
@@ -928,6 +977,9 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %postun hdfs
 %systemd_postun_with_restart %{hdfs_services}
 
+# Remove the compatibility symlink
+rm -f %{_var}/cache/%{name}-hdfs
+
 %if %{package_libhdfs}
 %postun -n libhdfs -p /sbin/ldconfig
 %endif
@@ -938,6 +990,15 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %postun yarn
 %systemd_postun_with_restart %{yarn_services}
 
+%posttrans hdfs
+# Create a compatibility symlink at the old hdfs data location, in case
+# a user-modified configuration file still points at the old path
+# rather than the new one
+if [ ! -e %{_var}/cache/%{name}-hdfs ]
+then
+  %{__ln_s} %{_var}/lib/%{name}-hdfs %{_var}/cache
+fi
+
 %files -f .mfiles-%{name}-client client
 %{_datadir}/%{name}/client
 
@@ -955,10 +1016,6 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %config(noreplace) %{_sysconfdir}/%{name}/ssl-server.xml.example
 %dir %{_datadir}/%{name}
 %dir %{_datadir}/%{name}/common
-
-# Workaround for bz1023116
-%{_datadir}/%{name}/common/%{name}-common.jar
-
 %{_datadir}/%{name}/common/lib
 %{_libexecdir}/%{name}-config.sh
 %{_libexecdir}/%{name}-layout.sh
@@ -991,6 +1048,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %attr(-,hdfs,hadoop) %{_sharedstatedir}/%{name}-hdfs
 %{_unitdir}/%{name}-datanode.service
 %{_unitdir}/%{name}-namenode.service
+%{_unitdir}/%{name}-journalnode.service
 %{_unitdir}/%{name}-secondarynamenode.service
 %{_unitdir}/%{name}-zkfc.service
 %{_libexecdir}/hdfs-config.sh
@@ -1002,7 +1060,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %config(noreplace) %attr(644, root, root) %{_sysconfdir}/logrotate.d/%{name}-hdfs
 %attr(0755,hdfs,hadoop) %dir %{_var}/run/%{name}-hdfs
 %attr(0755,hdfs,hadoop) %dir %{_var}/log/%{name}-hdfs
-%attr(0755,hdfs,hadoop) %dir %{_var}/cache/%{name}-hdfs
+%attr(0755,hdfs,hadoop) %dir %{_var}/lib/%{name}-hdfs
 
 %if %{package_libhdfs}
 %files hdfs-fuse
@@ -1028,9 +1086,11 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %attr(0775,root,tomcat) %dir %{_var}/cache/%{name}-httpfs/work
 
 %ifarch x86_64
+%if %{with javadoc}
 %files -f .mfiles-javadoc javadoc
 %doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/LICENSE.txt hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/NOTICE.txt
 %endif
+%endif
 
 %if %{package_libhdfs}
 %files -n libhdfs
@@ -1070,6 +1130,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %{_unitdir}/%{name}-nodemanager.service
 %{_unitdir}/%{name}-proxyserver.service
 %{_unitdir}/%{name}-resourcemanager.service
+%{_unitdir}/%{name}-timelineserver.service
 %{_libexecdir}/yarn-config.sh
 %{_datadir}/%{name}/yarn
 %{_bindir}/yarn
@@ -1089,6 +1150,12 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn
 %attr(6050,root,yarn) %{_bindir}/container-executor
 
 %changelog
+* Tue May 27 2014 Robert Rati <rrati at redhat> - 2.4.0-1
+- Update to upstream release 2.4.0
+- Fix fedora conditionals for non-fedora systems (BZ1083135)
+- Conditionalize javadoc generation
+- Update BuildRequires
+
 * Fri Mar 28 2014 Michael Simacek <msimacek at redhat.com> - 2.2.0-7
 - Use Requires: java-headless rebuild (#1067528)
 
diff --git a/sources b/sources
index a4ee4da..0b1ccb1 100644
--- a/sources
+++ b/sources
@@ -1 +1 @@
-96b2a80a6ffb25f3798427ca1f23f87f  hadoop-2.2.0-2e01e27.tar.gz
+9e06bb4592cb0da3d8b7af17c2d8537e  hadoop-2.4.0-9d04888.tar.gz

