Merged
@@ -243,7 +243,9 @@ public static class Builder {

private String hostName;
private boolean disallowFallbackToRandomSignerSecretProvider;
private String authFilterConfigurationPrefix = "hadoop.http.authentication.";
private final List<String> authFilterConfigurationPrefixes =
new ArrayList<>(Collections.singletonList(
"hadoop.http.authentication."));
private String excludeCiphers;

private boolean xFrameEnabled;
@@ -365,8 +367,15 @@ public Builder disallowFallbackToRandomSingerSecretProvider(boolean value) {
return this;
}

public Builder authFilterConfigurationPrefix(String value) {
this.authFilterConfigurationPrefix = value;
public Builder setAuthFilterConfigurationPrefix(String value) {
this.authFilterConfigurationPrefixes.clear();
this.authFilterConfigurationPrefixes.add(value);
return this;
}

public Builder setAuthFilterConfigurationPrefixes(String[] prefixes) {
this.authFilterConfigurationPrefixes.clear();
Collections.addAll(this.authFilterConfigurationPrefixes, prefixes);
return this;
}
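For context, a minimal caller-side sketch of the two setters above (server name, prefixes, and endpoint are illustrative, not taken from this patch). The single-prefix setter preserves the old one-prefix behavior; the array variant lets one authentication filter read several configuration namespaces, with keys under a later prefix overriding duplicates from an earlier one (see getFilterProperties below).

    // Hypothetical caller of the new Builder API, for illustration only;
    // conf is an org.apache.hadoop.conf.Configuration.
    HttpServer2 server = new HttpServer2.Builder()
        .setName("webapp")
        .setConf(conf)
        .setAuthFilterConfigurationPrefixes(new String[] {
            "webapp.authentication.",           // service-specific keys
            "hadoop.http.authentication."})     // shared keys, override on conflict
        .addEndpoint(URI.create("http://localhost:8080"))
        .build();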

@@ -473,8 +482,10 @@ public HttpServer2 build() throws IOException {
HttpServer2 server = new HttpServer2(this);

if (this.securityEnabled &&
!this.conf.get(authFilterConfigurationPrefix + "type").
equals(PseudoAuthenticationHandler.TYPE)) {
authFilterConfigurationPrefixes.stream().noneMatch(
prefix -> this.conf.get(prefix + "type")
.equals(PseudoAuthenticationHandler.TYPE))
) {
server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
}
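The stream condition above reads: initialize SPNEGO only when security is enabled and none of the configured prefixes selects the pseudo handler. An equivalent plain-loop sketch (this variant is null-safe; the code as merged calls equals() on the looked-up value, so it assumes every "<prefix>type" key is present):

    // Loop form of the noneMatch(...) predicate, for illustration only.
    boolean pseudoConfigured = false;
    for (String prefix : authFilterConfigurationPrefixes) {
      // e.g. "hadoop.http.authentication.type" set to "simple"
      if (PseudoAuthenticationHandler.TYPE.equals(conf.get(prefix + "type"))) {
        pseudoConfigured = true;
        break;
      }
    }
    if (securityEnabled && !pseudoConfigured) {
      server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
    }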

@@ -811,18 +822,25 @@ private static SignerSecretProvider constructSecretProvider(final Builder b,
throws Exception {
final Configuration conf = b.conf;
Properties config = getFilterProperties(conf,
b.authFilterConfigurationPrefix);
b.authFilterConfigurationPrefixes);
return AuthenticationFilter.constructSecretProvider(
ctx, config, b.disallowFallbackToRandomSignerSecretProvider);
}

private static Properties getFilterProperties(Configuration conf, String
prefix) {
Properties prop = new Properties();
Map<String, String> filterConfig = AuthenticationFilterInitializer
.getFilterConfigMap(conf, prefix);
prop.putAll(filterConfig);
return prop;
public static Properties getFilterProperties(Configuration conf, List<String> prefixes) {
Properties props = new Properties();
for (String prefix : prefixes) {
Map<String, String> filterConfigMap =
AuthenticationFilterInitializer.getFilterConfigMap(conf, prefix);
for (Map.Entry<String, String> entry : filterConfigMap.entrySet()) {
Object previous = props.setProperty(entry.getKey(), entry.getValue());
if (previous != null && !previous.equals(entry.getValue())) {
LOG.warn("Overwriting configuration for key='{}' with value='{}' " +
"previous value='{}'", entry.getKey(), entry.getValue(), previous);
}
}
}
return props;
}
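Prefixes are consumed in list order, so when two prefixes supply the same stripped key, the later value replaces the earlier one and the overwrite is logged. A hedged example, assuming no deprecation mappings are registered for these key names:

    // Illustrative only: the same underlying key ("type") under both prefixes.
    Configuration conf = new Configuration(false);
    conf.set("httpfs.authentication.type", "kerberos");
    conf.set("hadoop.http.authentication.type", "simple");
    Properties props = HttpServer2.getFilterProperties(conf, Arrays.asList(
        "httpfs.authentication.", "hadoop.http.authentication."));
    // The later prefix won, and a warning like
    // "Overwriting configuration for key='type' ..." was logged:
    // props.getProperty("type") -> "simple"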

private static void addNoCacheFilter(ServletContextHandler ctxt) {
@@ -118,7 +118,7 @@ public class KMSWebServer {
.setName(NAME)
.setConf(conf)
.setSSLConf(sslConf)
.authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
.setAuthFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
.setACL(new AccessControlList(conf.get(
KMSConfiguration.HTTP_ADMINS_KEY, " ")))
.addEndpoint(endpoint)
@@ -20,6 +20,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
@@ -35,6 +36,8 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;

@@ -46,9 +49,9 @@
public class HttpFSAuthenticationFilter
extends DelegationTokenAuthenticationFilter {

static final String CONF_PREFIX = "httpfs.authentication.";

static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
public static final String CONF_PREFIX = "httpfs.authentication.";
public static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
static final String[] CONF_PREFIXES = {CONF_PREFIX, HADOOP_HTTP_CONF_PREFIX};

private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
+ ".file";
@@ -69,27 +72,9 @@ public class HttpFSAuthenticationFilter
@Override
protected Properties getConfiguration(String configPrefix,
FilterConfig filterConfig) throws ServletException{
Properties props = new Properties();
Configuration conf = HttpFSServerWebApp.get().getConfig();

props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
name = name.substring(HADOOP_HTTP_CONF_PREFIX.length());
props.setProperty(name, entry.getValue());
}
}

// Replace Hadoop Http Authentication Configs with HttpFS specific Configs
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
if (name.startsWith(CONF_PREFIX)) {
String value = conf.get(name);
name = name.substring(CONF_PREFIX.length());
props.setProperty(name, value);
}
}
Properties props = HttpServer2.getFilterProperties(conf,
new ArrayList<>(Arrays.asList(CONF_PREFIXES)));

String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
if (signatureSecretFile == null) {
@@ -106,8 +91,16 @@ protected Properties getConfiguration(String configPrefix,
secret.append((char) c);
c = reader.read();
}

String secretString = secret.toString();
if (secretString.isEmpty()) {
throw new RuntimeException(
"No secret in HttpFs signature secret file: "
+ signatureSecretFile);
}

props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
secret.toString());
secretString);
} catch (IOException ex) {
throw new RuntimeException("Could not read HttpFS signature "
+ "secret file: " + signatureSecretFile);
@@ -17,6 +17,14 @@
*/
package org.apache.hadoop.fs.http.server;

import com.google.common.annotations.VisibleForTesting;

import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.CONF_PREFIX;
import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX;
import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.AUTH_TYPE;
import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNATURE_SECRET_FILE;
import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.KEYTAB;
import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.PRINCIPAL;
import static org.apache.hadoop.util.StringUtils.startupShutdownMessage;

import java.io.IOException;
Expand Down Expand Up @@ -65,6 +73,7 @@ public class HttpFSServerWebServer {
private static final String SERVLET_PATH = "/webhdfs";

static {
addDeprecatedKeys();
Configuration.addDefaultResource(HTTPFS_DEFAULT_XML);
Configuration.addDefaultResource(HTTPFS_SITE_XML);
}
@@ -124,7 +133,8 @@ public class HttpFSServerWebServer {
.setName(NAME)
.setConf(conf)
.setSSLConf(sslConf)
.authFilterConfigurationPrefix(HttpFSAuthenticationFilter.CONF_PREFIX)
.setAuthFilterConfigurationPrefixes(
HttpFSAuthenticationFilter.CONF_PREFIXES)
.setACL(new AccessControlList(conf.get(HTTP_ADMINS_KEY, " ")))
.addEndpoint(endpoint)
.build();
@@ -178,6 +188,11 @@ public URL getUrl() {
}
}

@VisibleForTesting
HttpServer2 getHttpServer() {
return httpServer;
}

public static void main(String[] args) throws Exception {
startupShutdownMessage(HttpFSServerWebServer.class, args, LOG);
Configuration conf = new Configuration(true);
@@ -187,4 +202,17 @@ public static void main(String[] args) throws Exception {
webServer.start();
webServer.join();
}

public static void addDeprecatedKeys() {
Configuration.addDeprecations(new Configuration.DeprecationDelta[]{
new Configuration.DeprecationDelta(CONF_PREFIX + KEYTAB,
HADOOP_HTTP_CONF_PREFIX + KEYTAB),
new Configuration.DeprecationDelta(CONF_PREFIX + PRINCIPAL,
HADOOP_HTTP_CONF_PREFIX + PRINCIPAL),
new Configuration.DeprecationDelta(CONF_PREFIX + SIGNATURE_SECRET_FILE,
HADOOP_HTTP_CONF_PREFIX + SIGNATURE_SECRET_FILE),
new Configuration.DeprecationDelta(CONF_PREFIX + AUTH_TYPE,
HADOOP_HTTP_CONF_PREFIX + AUTH_TYPE)
});
}
}
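Sketch of the effect (keytab path illustrative): after addDeprecatedKeys() has run — it is invoked from this class's static initializer — a value set under the old httpfs.authentication.* name is also visible under the new hadoop.http.authentication.* name, with a deprecation warning on first use.

    // Illustrative only: the deprecated key is transparently remapped.
    Configuration conf = new Configuration(false);
    conf.set("httpfs.authentication.kerberos.keytab", "/etc/security/httpfs.keytab");
    // Both names now resolve to the same value:
    conf.get("httpfs.authentication.kerberos.keytab");       // "/etc/security/httpfs.keytab"
    conf.get("hadoop.http.authentication.kerberos.keytab");  // "/etc/security/httpfs.keytab"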
@@ -158,8 +158,8 @@
If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
they should share the secret file.

If the secret file specified here does not exist, random secret is
generated at startup time.
If the secret file specified here does not exist or it is empty, a random
secret is generated at startup time.

httpfs.authentication.signature.secret.file is deprecated. Instead use
hadoop.http.authentication.signature.secret.file.
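As an aside, a hedged sketch of provisioning that shared, non-empty secret file (path, contents, and permissions are illustrative, not prescribed by this patch):

    // Illustrative only (java.nio.file imports assumed): create a non-empty
    // secret file shared by all load-balanced HttpFS instances, readable
    // only by the service user.
    Path secret = Paths.get("/etc/security/httpfs-signature.secret");
    Files.write(secret, "change-me-shared-secret".getBytes(StandardCharsets.UTF_8));
    Files.setPosixFilePermissions(secret, EnumSet.of(
        PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE));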
@@ -33,6 +33,7 @@
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter;
import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
@@ -58,6 +59,7 @@
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HFSTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.LambdaTestUtils;
@@ -148,7 +150,8 @@ private void createHttpFSServer() throws Exception {
HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);
conf.writeXml(os);
@@ -23,6 +23,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir;
@@ -128,8 +129,9 @@ private void createHttpFSServer() throws Exception {
conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file",
secretFile.getAbsolutePath());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());

File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);
@@ -231,8 +231,9 @@ private Configuration createHttpFSConf(boolean addDelegationTokenAuthHandler,
// HTTPFS configuration
conf = new Configuration(false);
if (addDelegationTokenAuthHandler) {
conf.set("httpfs.authentication.type",
HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.AUTH_TYPE,
HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
}
conf.set("httpfs.services.ext", MockGroups.class.getName());
conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
@@ -243,8 +244,9 @@
conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file",
secretFile.getAbsolutePath());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
if (sslEnabled) {
conf.set("httpfs.ssl.enabled", "true");
@@ -23,6 +23,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir;
@@ -136,8 +137,9 @@ private void createHttpFSServer() throws Exception {
conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file",
secretFile.getAbsolutePath());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());

File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);
@@ -23,6 +23,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir;
@@ -137,8 +138,9 @@ private void createHttpFSServer() throws Exception {
conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file",
secretFile.getAbsolutePath());
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());

File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);