Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HIVE-28104. (2.3) Move HTTP related methods from Utils to HttpUtils in shims #5114

Merged
merged 1 commit into from
Mar 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
import org.apache.hadoop.hdfs.web.AuthFilter;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.hive.shims.HttpUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
Expand Down Expand Up @@ -235,12 +235,12 @@ public Server runServer(int port)
public FilterHolder makeXSRFFilter() {
String customHeader = null; // The header to look for. We use "X-XSRF-HEADER" if this is null.
String methodsToIgnore = null; // Methods to not filter. By default: "GET,OPTIONS,HEAD,TRACE" if null.
FilterHolder fHolder = new FilterHolder(Utils.getXSRFFilter());
FilterHolder fHolder = new FilterHolder(HttpUtils.getXSRFFilter());
if (customHeader != null){
fHolder.setInitParameter(Utils.XSRF_CUSTOM_HEADER_PARAM, customHeader);
fHolder.setInitParameter(HttpUtils.XSRF_CUSTOM_HEADER_PARAM, customHeader);
}
if (methodsToIgnore != null){
fHolder.setInitParameter(Utils.XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM, methodsToIgnore);
fHolder.setInitParameter(HttpUtils.XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM, methodsToIgnore);
}
FilterHolder xsrfFilter = fHolder;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,8 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
import org.apache.hadoop.hive.shims.HttpUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
import org.apache.hive.service.CookieSigner;
Expand Down Expand Up @@ -130,7 +130,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)

try {
if (hiveConf.getBoolean(ConfVars.HIVE_SERVER2_XSRF_FILTER_ENABLED.varname,false)){
boolean continueProcessing = Utils.doXsrfFilter(request,response,null,null);
boolean continueProcessing = HttpUtils.doXsrfFilter(request,response,null,null);
if (!continueProcessing){
LOG.warn("Request did not have valid XSRF header, rejecting.");
return;
Expand Down
139 changes: 139 additions & 0 deletions shims/common/src/main/java/org/apache/hadoop/hive/shims/HttpUtils.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.shims;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * HTTP-related helper methods shared by the shims — primarily the
 * cross-site request forgery (XSRF) protection filter used by HiveServer2
 * and WebHCat. Split out of {@code Utils} (HIVE-28104) so HTTP-only callers
 * do not drag in the Hadoop security/UGI machinery that {@code Utils} loads.
 */
public class HttpUtils {

  private static final Logger LOG = LoggerFactory.getLogger(HttpUtils.class);

  /** Filter init-param naming the header whose presence marks a request as XSRF-safe. */
  public static final String XSRF_CUSTOM_HEADER_PARAM = "custom-header";
  /** Filter init-param: comma-separated HTTP methods exempt from the XSRF check. */
  public static final String XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM = "methods-to-ignore";
  /** Header checked when no custom header name is configured. */
  private static final String XSRF_HEADER_DEFAULT = "X-XSRF-HEADER";
  /** Methods exempt by default: safe/read-only verbs per the CSRF threat model. */
  private static final Set<String> XSRF_METHODS_TO_IGNORE_DEFAULT =
      new HashSet<String>(Arrays.asList("GET", "OPTIONS", "HEAD", "TRACE"));

  private HttpUtils() {
    // Utility class; static methods only — not meant to be instantiated.
  }

  /**
   * Return Hadoop's native {@code RestCsrfPreventionFilter} if it is on the
   * classpath (Hadoop 2.7.1+); otherwise fall back to a local re-implementation
   * of the same logic so older Hadoop versions still get XSRF protection.
   *
   * @return a servlet {@link Filter} enforcing the XSRF header check
   */
  public static Filter getXSRFFilter() {
    String filterClass = "org.apache.hadoop.security.http.RestCsrfPreventionFilter";
    try {
      // asSubclass() gives a type-safe Class<? extends Filter> without an unchecked
      // cast, and getDeclaredConstructor().newInstance() avoids the unsound
      // Class.newInstance(), which rethrows checked constructor exceptions unchecked.
      Filter f = Class.forName(filterClass)
          .asSubclass(Filter.class)
          .getDeclaredConstructor()
          .newInstance();
      LOG.debug("Filter {} found, using as-is.", filterClass);
      return f;
    } catch (Exception e) {
      // ClassNotFoundException, ClassCastException, NoSuchMethodException,
      // InstantiationException, IllegalAccessException, InvocationTargetException:
      // the Hadoop filter is unavailable or unusable — use our local copy.
      LOG.debug("Unable to use {}, got exception {}. Using internal shims impl of filter.",
          filterClass, e.getClass().getName());
    }
    return constructXSRFFilter();
  }

  /**
   * Build a local equivalent of Hadoop's RestCsrfPreventionFilter.
   * Hadoop 2.7.1 onwards ships that filter ready to use, but since we must run
   * against a multitude of Hadoop versions, including very old ones, we either
   * duplicate its logic here or leave older Hadoop unprotected. So we duplicate
   * to minimize evil (ugh). See HADOOP-12691 for details of what this does.
   * This method is never called when Hadoop 2.7+ is available.
   */
  private static Filter constructXSRFFilter() {
    return new Filter() {

      private String headerName = XSRF_HEADER_DEFAULT;
      private Set<String> methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT;

      @Override
      public void init(FilterConfig filterConfig) throws ServletException {
        String customHeader = filterConfig.getInitParameter(XSRF_CUSTOM_HEADER_PARAM);
        if (customHeader != null) {
          headerName = customHeader;
        }
        String customMethodsToIgnore =
            filterConfig.getInitParameter(XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM);
        if (customMethodsToIgnore != null) {
          // A configured list replaces (does not extend) the defaults.
          methodsToIgnore =
              new HashSet<String>(Arrays.asList(customMethodsToIgnore.split(",")));
        }
      }

      @Override
      public void doFilter(
          ServletRequest request, ServletResponse response,
          FilterChain chain) throws IOException, ServletException {
        // Only continue the chain if the request passed the XSRF check;
        // doXsrfFilter has already written the error response otherwise.
        if (doXsrfFilter(request, response, methodsToIgnore, headerName)) {
          chain.doFilter(request, response);
        }
      }

      @Override
      public void destroy() {
        // no resources to release
      }
    };
  }

  /**
   * Standalone XSRF check mirroring the filter above, for callers that do not
   * use servlet filters as-is (e.g. ThriftHttpServlet invokes it directly).
   *
   * @param request         incoming request; must be an {@link HttpServletRequest}
   * @param response        response to write the rejection to, if any
   * @param methodsToIgnore HTTP methods exempt from the check; defaults applied if null
   * @param headerName      header whose presence marks the request safe; default if null
   * @return true if processing should continue, false if the request was rejected
   *         (a 400 error has already been sent on the response)
   * @throws IOException      if writing the rejection response fails
   * @throws ServletException declared for parity with the filter contract
   */
  public static boolean doXsrfFilter(ServletRequest request, ServletResponse response,
      Set<String> methodsToIgnore, String headerName) throws IOException, ServletException {
    HttpServletRequest httpRequest = (HttpServletRequest) request;
    if (methodsToIgnore == null) {
      methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT;
    }
    if (headerName == null) {
      headerName = XSRF_HEADER_DEFAULT;
    }
    if (methodsToIgnore.contains(httpRequest.getMethod()) ||
        httpRequest.getHeader(headerName) != null) {
      return true;
    } else {
      ((HttpServletResponse) response).sendError(
          HttpServletResponse.SC_BAD_REQUEST,
          "Missing Required Header for Vulnerability Protection");
      response.getWriter().println(
          "XSRF filter denial, requests must contain header : " + headerName);
      return false;
    }
  }
}
111 changes: 0 additions & 111 deletions shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,26 +19,12 @@
package org.apache.hadoop.hive.shims;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginException;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
Expand All @@ -53,8 +39,6 @@

public class Utils {

private static final Logger LOG = LoggerFactory.getLogger(Utils.class);

private static final boolean IBM_JAVA = System.getProperty("java.vendor")
.contains("IBM");

Expand Down Expand Up @@ -203,100 +187,5 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
}
}

public static final String XSRF_CUSTOM_HEADER_PARAM = "custom-header";
public static final String XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM = "methods-to-ignore";
private static final String XSRF_HEADER_DEFAULT = "X-XSRF-HEADER";
private static final Set<String> XSRF_METHODS_TO_IGNORE_DEFAULT = new HashSet<String>(Arrays.asList("GET", "OPTIONS", "HEAD", "TRACE"));

/*
* Return Hadoop-native RestCsrfPreventionFilter if it is available.
* Otherwise, construct our own copy of its logic.
*/
public static Filter getXSRFFilter() {
Copy link
Member Author

@pan3793 pan3793 Mar 5, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure if the Hive community treats it as a public API, if there are concerns about compatibility, another approach is:

  1. leaving Utils as-is
  2. creating a HadoopUtils, copying all other methods to HadoopUtils
  3. changing callers to use HadoopUtils instead of Utils to avoid initializing Utils

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sounds like a more compatible solution.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Either works for me. Looks like this method is only in one place.

String filterClass = "org.apache.hadoop.security.http.RestCsrfPreventionFilter";
try {
Class<? extends Filter> klass = (Class<? extends Filter>) Class.forName(filterClass);
Filter f = klass.newInstance();
LOG.debug("Filter {} found, using as-is.", filterClass);
return f;
} catch (Exception e) {
// ClassNotFoundException, InstantiationException, IllegalAccessException
// Class could not be init-ed, use our local copy
LOG.debug("Unable to use {}, got exception {}. Using internal shims impl of filter.",
filterClass, e.getClass().getName());
}
return Utils.constructXSRFFilter();
}

private static Filter constructXSRFFilter() {
// Note Hadoop 2.7.1 onwards includes a RestCsrfPreventionFilter class that is
// usable as-is. However, since we have to work on a multitude of hadoop versions
// including very old ones, we either duplicate their code here, or not support
// an XSRFFilter on older versions of hadoop So, we duplicate to minimize evil(ugh).
// See HADOOP-12691 for details of what this is doing.
// This method should never be called if Hadoop 2.7+ is available.

return new Filter(){

private String headerName = XSRF_HEADER_DEFAULT;
private Set<String> methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT;

@Override
public void init(FilterConfig filterConfig) throws ServletException {
String customHeader = filterConfig.getInitParameter(XSRF_CUSTOM_HEADER_PARAM);
if (customHeader != null) {
headerName = customHeader;
}
String customMethodsToIgnore = filterConfig.getInitParameter(
XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM);
if (customMethodsToIgnore != null) {
parseMethodsToIgnore(customMethodsToIgnore);
}
}

void parseMethodsToIgnore(String mti) {
String[] methods = mti.split(",");
methodsToIgnore = new HashSet<String>();
for (int i = 0; i < methods.length; i++) {
methodsToIgnore.add(methods[i]);
}
}

@Override
public void doFilter(
ServletRequest request, ServletResponse response,
FilterChain chain) throws IOException, ServletException {
if (doXsrfFilter(request, response, methodsToIgnore, headerName)){
chain.doFilter(request, response);
}
}

@Override
public void destroy() {
// do nothing
}
};
}

// Method that provides similar filter functionality to filter-holder above, useful when
// calling from code that does not use filters as-is.
public static boolean doXsrfFilter(ServletRequest request, ServletResponse response,
Set<String> methodsToIgnore, String headerName) throws IOException, ServletException {
HttpServletRequest httpRequest = (HttpServletRequest)request;
if (methodsToIgnore == null) { methodsToIgnore = XSRF_METHODS_TO_IGNORE_DEFAULT ; }
if (headerName == null ) { headerName = XSRF_HEADER_DEFAULT; }
if (methodsToIgnore.contains(httpRequest.getMethod()) ||
httpRequest.getHeader(headerName) != null) {
return true;
} else {
((HttpServletResponse)response).sendError(
HttpServletResponse.SC_BAD_REQUEST,
"Missing Required Header for Vulnerability Protection");
response.getWriter().println(
"XSRF filter denial, requests must contain header : " + headerName);
return false;
}
}


}