diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
index 5208bf4a4ca9..3f0b0241f027 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
@@ -34,7 +34,7 @@
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.web.AuthFilter;
-import org.apache.hadoop.hive.shims.Utils;
+import org.apache.hadoop.hive.shims.HttpUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
@@ -235,12 +235,12 @@ public Server runServer(int port)
   public FilterHolder makeXSRFFilter() {
     String customHeader = null; // The header to look for. We use "X-XSRF-HEADER" if this is null.
     String methodsToIgnore = null; // Methods to not filter. By default: "GET,OPTIONS,HEAD,TRACE" if null.
-    FilterHolder fHolder = new FilterHolder(Utils.getXSRFFilter());
+    FilterHolder fHolder = new FilterHolder(HttpUtils.getXSRFFilter());
     if (customHeader != null){
-      fHolder.setInitParameter(Utils.XSRF_CUSTOM_HEADER_PARAM, customHeader);
+      fHolder.setInitParameter(HttpUtils.XSRF_CUSTOM_HEADER_PARAM, customHeader);
     }
     if (methodsToIgnore != null){
-      fHolder.setInitParameter(Utils.XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM, methodsToIgnore);
+      fHolder.setInitParameter(HttpUtils.XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM, methodsToIgnore);
     }
     FilterHolder xsrfFilter = fHolder;
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index fbe6da40201f..cca0e81caaec 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -41,8 +41,8 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
+import org.apache.hadoop.hive.shims.HttpUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
 import org.apache.hive.service.CookieSigner;
@@ -130,7 +130,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
     try {
       if (hiveConf.getBoolean(ConfVars.HIVE_SERVER2_XSRF_FILTER_ENABLED.varname,false)){
-        boolean continueProcessing = Utils.doXsrfFilter(request,response,null,null);
+        boolean continueProcessing = HttpUtils.doXsrfFilter(request,response,null,null);
         if (!continueProcessing){
           LOG.warn("Request did not have valid XSRF header, rejecting.");
           return;
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HttpUtils.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HttpUtils.java
new file mode 100644
index 000000000000..7bf6fba7dfe0
--- /dev/null
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HttpUtils.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import java.io.IOException;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HttpUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(HttpUtils.class);
+
+  public static final String XSRF_CUSTOM_HEADER_PARAM = "custom-header";
+  public static final String XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM = "methods-to-ignore";
+  private static final String XSRF_HEADER_DEFAULT = "X-XSRF-HEADER";
+  private static final Set<String> XSRF_METHODS_TO_IGNORE_DEFAULT = new HashSet<String>(Arrays.asList("GET", "OPTIONS", "HEAD", "TRACE"));
+
+  /*
+   * Return Hadoop-native RestCsrfPreventionFilter if it is available.
+   * Otherwise, construct our own copy of its logic.
+   */
+  public static Filter getXSRFFilter() {
+    String filterClass = "org.apache.hadoop.security.http.RestCsrfPreventionFilter";
+    try {
+      Class<? extends Filter> klass = (Class<? extends Filter>) Class.forName(filterClass);
+      Filter f = klass.newInstance();
+      LOG.debug("Filter {} found, using as-is.", filterClass);
+      return f;
+    } catch (Exception e) {
+      // ClassNotFoundException, InstantiationException, IllegalAccessException
+      // Class could not be init-ed, use our local copy
+      LOG.debug("Unable to use {}, got exception {}. Using internal shims impl of filter.",
+          filterClass, e.getClass().getName());
+    }
+    return HttpUtils.constructXSRFFilter();
+  }
+
+  private static Filter constructXSRFFilter() {
+    // Note Hadoop 2.7.1 onwards includes a RestCsrfPreventionFilter class that is
+    // usable as-is. However, since we have to work on a multitude of hadoop versions
+    // including very old ones, we either duplicate their code here, or not support
+    // an XSRFFilter on older versions of hadoop So, we duplicate to minimize evil(ugh).
+    // See HADOOP-12691 for details of what this is doing.
+    // This method should never be called if Hadoop 2.7+ is available.
+
+    return new Filter() {
+
+      private String headerName = XSRF_HEADER_DEFAULT;
+      private Set<String> methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT;
+
+      @Override
+      public void init(FilterConfig filterConfig) throws ServletException {
+        String customHeader = filterConfig.getInitParameter(XSRF_CUSTOM_HEADER_PARAM);
+        if (customHeader != null) {
+          headerName = customHeader;
+        }
+        String customMethodsToIgnore = filterConfig.getInitParameter(
+            XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM);
+        if (customMethodsToIgnore != null) {
+          parseMethodsToIgnore(customMethodsToIgnore);
+        }
+      }
+
+      void parseMethodsToIgnore(String mti) {
+        String[] methods = mti.split(",");
+        methodsToIgnore = new HashSet<String>();
+        for (int i = 0; i < methods.length; i++) {
+          methodsToIgnore.add(methods[i]);
+        }
+      }
+
+      @Override
+      public void doFilter(
+          ServletRequest request, ServletResponse response,
+          FilterChain chain) throws IOException, ServletException {
+        if (doXsrfFilter(request, response, methodsToIgnore, headerName)) {
+          chain.doFilter(request, response);
+        }
+      }
+
+      @Override
+      public void destroy() {
+        // do nothing
+      }
+    };
+  }
+
+  // Method that provides similar filter functionality to filter-holder above, useful when
+  // calling from code that does not use filters as-is.
+  public static boolean doXsrfFilter(ServletRequest request, ServletResponse response,
+      Set<String> methodsToIgnore, String headerName) throws IOException, ServletException {
+    HttpServletRequest httpRequest = (HttpServletRequest) request;
+    if (methodsToIgnore == null) {
+      methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT;
+    }
+    if (headerName == null) {
+      headerName = XSRF_HEADER_DEFAULT;
+    }
+    if (methodsToIgnore.contains(httpRequest.getMethod()) ||
+        httpRequest.getHeader(headerName) != null) {
+      return true;
+    } else {
+      ((HttpServletResponse) response).sendError(
+          HttpServletResponse.SC_BAD_REQUEST,
+          "Missing Required Header for Vulnerability Protection");
+      response.getWriter().println(
+          "XSRF filter denial, requests must contain header : " + headerName);
+      return false;
+    }
+  }
+}
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
index 3c93186082ae..4bcb8c3957a3 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
@@ -19,26 +19,12 @@
 package org.apache.hadoop.hive.shims;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
 
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.LoginException;
 import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -53,8 +39,6 @@ public class Utils {
 
-  private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
-
   private static final boolean IBM_JAVA = System.getProperty("java.vendor")
       .contains("IBM");
@@ -203,100 +187,5 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
     }
   }
 
-  public static final String XSRF_CUSTOM_HEADER_PARAM = "custom-header";
-  public static final String XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM = "methods-to-ignore";
-  private static final String XSRF_HEADER_DEFAULT = "X-XSRF-HEADER";
-  private static final Set<String> XSRF_METHODS_TO_IGNORE_DEFAULT = new HashSet<String>(Arrays.asList("GET", "OPTIONS", "HEAD", "TRACE"));
-
-  /*
-   * Return Hadoop-native RestCsrfPreventionFilter if it is available.
-   * Otherwise, construct our own copy of its logic.
-   */
-  public static Filter getXSRFFilter() {
-    String filterClass = "org.apache.hadoop.security.http.RestCsrfPreventionFilter";
-    try {
-      Class<? extends Filter> klass = (Class<? extends Filter>) Class.forName(filterClass);
-      Filter f = klass.newInstance();
-      LOG.debug("Filter {} found, using as-is.", filterClass);
-      return f;
-    } catch (Exception e) {
-      // ClassNotFoundException, InstantiationException, IllegalAccessException
-      // Class could not be init-ed, use our local copy
-      LOG.debug("Unable to use {}, got exception {}. Using internal shims impl of filter.",
-          filterClass, e.getClass().getName());
-    }
-    return Utils.constructXSRFFilter();
-  }
-
-  private static Filter constructXSRFFilter() {
-    // Note Hadoop 2.7.1 onwards includes a RestCsrfPreventionFilter class that is
-    // usable as-is. However, since we have to work on a multitude of hadoop versions
-    // including very old ones, we either duplicate their code here, or not support
-    // an XSRFFilter on older versions of hadoop So, we duplicate to minimize evil(ugh).
-    // See HADOOP-12691 for details of what this is doing.
-    // This method should never be called if Hadoop 2.7+ is available.
-
-    return new Filter(){
-
-      private String headerName = XSRF_HEADER_DEFAULT;
-      private Set<String> methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT;
-
-      @Override
-      public void init(FilterConfig filterConfig) throws ServletException {
-        String customHeader = filterConfig.getInitParameter(XSRF_CUSTOM_HEADER_PARAM);
-        if (customHeader != null) {
-          headerName = customHeader;
-        }
-        String customMethodsToIgnore = filterConfig.getInitParameter(
-            XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM);
-        if (customMethodsToIgnore != null) {
-          parseMethodsToIgnore(customMethodsToIgnore);
-        }
-      }
-
-      void parseMethodsToIgnore(String mti) {
-        String[] methods = mti.split(",");
-        methodsToIgnore = new HashSet<String>();
-        for (int i = 0; i < methods.length; i++) {
-          methodsToIgnore.add(methods[i]);
-        }
-      }
-
-      @Override
-      public void doFilter(
-          ServletRequest request, ServletResponse response,
-          FilterChain chain) throws IOException, ServletException {
-        if (doXsrfFilter(request, response, methodsToIgnore, headerName)){
-          chain.doFilter(request, response);
-        }
-      }
-
-      @Override
-      public void destroy() {
-        // do nothing
-      }
-    };
-  }
-
-  // Method that provides similar filter functionality to filter-holder above, useful when
-  // calling from code that does not use filters as-is.
-  public static boolean doXsrfFilter(ServletRequest request, ServletResponse response,
-      Set<String> methodsToIgnore, String headerName) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest)request;
-    if (methodsToIgnore == null) { methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT ; }
-    if (headerName == null ) { headerName = XSRF_HEADER_DEFAULT; }
-    if (methodsToIgnore.contains(httpRequest.getMethod()) ||
-          httpRequest.getHeader(headerName) != null) {
-      return true;
-    } else {
-      ((HttpServletResponse)response).sendError(
-          HttpServletResponse.SC_BAD_REQUEST,
-          "Missing Required Header for Vulnerability Protection");
-      response.getWriter().println(
-          "XSRF filter denial, requests must contain header : " + headerName);
-      return false;
-    }
-  }
-
 }
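
Note (illustration only, not part of the patch): the change relocates the XSRF logic from Utils to HttpUtils without altering its contract. Any request whose HTTP method is not one of GET, OPTIONS, HEAD, TRACE must carry the configured header (X-XSRF-HEADER by default; only the header's presence is checked, not its value), otherwise the filter answers with 400 Bad Request. The sketch below shows that contract from a client's point of view; the endpoint URL, class name, and request body are placeholders chosen for the example, not something this change introduces.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class XsrfHeaderClientSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder endpoint: 50111 is WebHCat's default port, but any server that
    // installs HttpUtils.getXSRFFilter() behaves the same way for non-GET requests.
    URL url = new URL("http://localhost:50111/templeton/v1/ddl");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    // The filter only checks that this header is present; any value passes.
    conn.setRequestProperty("X-XSRF-HEADER", "true");
    conn.setDoOutput(true);
    try (OutputStream out = conn.getOutputStream()) {
      out.write("exec=show+tables;".getBytes(StandardCharsets.UTF_8));
    }
    // Without the header, the same POST would be rejected with 400
    // ("Missing Required Header for Vulnerability Protection").
    System.out.println("HTTP status: " + conn.getResponseCode());
  }
}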