The commit() method documents the user lookup order in detail: 1. System.getenv(HADOOP_USER_NAME) 2. System.getProperty(HADOOP_USER_NAME) 3. use the OS user.
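To make that fallback concrete, here is a minimal sketch (not taken from the Hadoop sources; the class name SimpleAuthUserDemo and the user name "alice" are made up for illustration) of how, with security disabled, the HADOOP_USER_NAME system property decides the identity returned by UserGroupInformation.getCurrentUser():

import org.apache.hadoop.security.UserGroupInformation;

public class SimpleAuthUserDemo {
  public static void main(String[] args) throws Exception {
    // With simple auth, HadoopLoginModule.commit() checks the HADOOP_USER_NAME
    // environment variable first, then this system property, then the OS user.
    System.setProperty("HADOOP_USER_NAME", "alice"); // hypothetical user name
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    // Prints "alice", assuming the HADOOP_USER_NAME environment variable is
    // unset and Kerberos security is not enabled in the configuration.
    System.out.println("current user: " + ugi.getUserName());
  }
}

The relevant source of UserGroupInformation follows.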
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
 */
package org.apache.hadoop.security;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES;
import static org.apache.hadoop.security.UGIExceptionMessages.*;
import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
import com.google.common.annotations.VisibleForTesting;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.security.auth.DestroyFailedException;
import javax.security.auth.Subject;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KerberosTicket;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
import javax.security.auth.login.Configuration.Parameters;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
import org.apache.hadoop.metrics2.lib.MutableQuantiles;
import org.apache.hadoop.metrics2.lib.MutableRate;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* User and group information for Hadoop.
* This class wraps around a JAAS Subject and provides methods to determine the
* user's username and groups. It supports the Windows, Unix, and Kerberos
* login modules.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class UserGroupInformation {
@VisibleForTesting
static final Logger LOG = LoggerFactory.getLogger(
UserGroupInformation.class);
/**
* Percentage of the ticket window to use before we renew ticket.
*/
private static final float TICKET_RENEW_WINDOW = 0.80f;
private static boolean shouldRenewImmediatelyForTests = false;
static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
/**
* For the purposes of unit tests, we want to test login
* from keytab and don't want to wait until the renew
* window (controlled by TICKET_RENEW_WINDOW).
* @param immediate true if we should login without waiting for ticket window
*/
@VisibleForTesting
public static void setShouldRenewImmediatelyForTests(boolean immediate) {
shouldRenewImmediatelyForTests = immediate;
}
/**
* UgiMetrics maintains UGI activity statistics
* and publishes them through the metrics interfaces.
*/
@Metrics(about="User and group related metrics", context="ugi")
static class UgiMetrics {
final MetricsRegistry registry = new MetricsRegistry("UgiMetrics");
@Metric("Rate of successful kerberos logins and latency (milliseconds)")
MutableRate loginSuccess;
@Metric("Rate of failed kerberos logins and latency (milliseconds)")
MutableRate loginFailure;
@Metric("GetGroups") MutableRate getGroups;
MutableQuantiles[] getGroupsQuantiles;
@Metric("Renewal failures since startup")
private MutableGaugeLong renewalFailuresTotal;
@Metric("Renewal failures since last successful login")
private MutableGaugeInt renewalFailures;
static UgiMetrics create() {
return DefaultMetricsSystem.instance().register(new UgiMetrics());
}
static void reattach() {
metrics = UgiMetrics.create();
}
void addGetGroups(long latency) {
getGroups.add(latency);
if (getGroupsQuantiles != null) {
for (MutableQuantiles q : getGroupsQuantiles) {
q.add(latency);
}
}
}
MutableGaugeInt getRenewalFailures() {
return renewalFailures;
}
}
/**
* A login module that looks at the Kerberos, Unix, or Windows principal and
* adds the corresponding UserName.
*/
@InterfaceAudience.Private
public static class HadoopLoginModule implements LoginModule {
private Subject subject;
@Override
public boolean abort() throws LoginException {
return true;
}
private <T extends Principal> T getCanonicalUser(Class<T> cls) {
for(T user: subject.getPrincipals(cls)) {
return user;
}
return null;
}
@Override
public boolean commit() throws LoginException {
if (LOG.isDebugEnabled()) {
LOG.debug("hadoop login commit");
}
// if we already have a user, we are done.
if (!subject.getPrincipals(User.class).isEmpty()) {
if (LOG.isDebugEnabled()) {
LOG.debug("using existing subject:"+subject.getPrincipals());
}
return true;
}
Principal user = getCanonicalUser(KerberosPrincipal.class);
if (user != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("using kerberos user:"+user);
}
}
//If we don't have a kerberos user and security is disabled, check
//if user is specified in the environment or properties
if (!isSecurityEnabled() && (user == null)) {
String envUser = System.getenv(HADOOP_USER_NAME);
if (envUser == null) {
envUser = System.getProperty(HADOOP_USER_NAME);
}
user = envUser == null ? null : new User(envUser);
}
// use the OS user
if (user == null) {
user = getCanonicalUser(OS_PRINCIPAL_CLASS);
if (LOG.isDebugEnabled()) {
LOG.debug("using local user:"+user);
}
}
// if we found the user, add our principal
if (user != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Using user: \"" + user + "\" with name " + user.getName());
}
User userEntry = null;
try {
// LoginContext will be attached later unless it's an external
// subject.
AuthenticationMethod authMethod = (user instanceof KerberosPrincipal)
? AuthenticationMethod.KERBEROS : AuthenticationMethod.SIMPLE;
userEntry = new User(user.getName(), authMethod, null);
} catch (Exception e) {
throw (LoginException)(new LoginException(e.toString()).initCause(e));
}
if (LOG.isDebugEnabled()) {
LOG.debug("User entry: \"" + userEntry.toString() + "\"" );
}
subject.getPrincipals().add(userEntry);
return true;
}
LOG.error("Can't find user in " + subject);
throw new LoginException("Can't find user name");
}
@Override
public void initialize(Subject subject, CallbackHandler callbackHandler,
Map<String, ?> sharedState, Map<String, ?> options) {
this.subject = subject;
}
@Override
public boolean login() throws LoginException {
if (LOG.isDebugEnabled()) {
LOG.debug("hadoop login");
}
return true;
}
@Override
public boolean logout() throws LoginException {
if (LOG.isDebugEnabled()) {
LOG.debug("hadoop logout");
}
return true;
}
}
/**
* Reattach the class's metrics to a new metric system.
 */
public static void reattachMetrics() {
UgiMetrics.reattach();
}
/** Metrics to track UGI activity */
static UgiMetrics metrics = UgiMetrics.create();
/** The auth method to use */
private static AuthenticationMethod authenticationMethod;
/** Server-side groups fetching service */
private static Groups groups;
/** Min time (in seconds) before relogin for Kerberos */
private static long kerberosMinSecondsBeforeRelogin;
/** The configuration to use */
private static Configuration conf;
/** Environment variable pointing to the token cache file */
public static final String HADOOP_TOKEN_FILE_LOCATION =
"HADOOP_TOKEN_FILE_LOCATION";
public static boolean isInitialized() {
return conf != null;
}
/**
* A method to initialize the fields that depend on a configuration.
* Must be called before useKerberos or groups is used.
 */
private static void ensureInitialized() {
if (!isInitialized()) {
synchronized(UserGroupInformation.class) {
if (!isInitialized()) { // someone might have beat us
initialize(new Configuration(), false);
}
}
}
}
/**
* Initialize UGI and related classes.
* @param conf the configuration to use
 */
private static synchronized void initialize(Configuration conf,
boolean overrideNameRules) {
authenticationMethod = SecurityUtil.getAuthenticationMethod(conf);
if (overrideNameRules || !HadoopKerberosName.hasRulesBeenSet()) {
try {
HadoopKerberosName.setConfiguration(conf);
} catch (IOException ioe) {
throw new RuntimeException(
"Problem with Kerberos auth_to_local name configuration", ioe);
}
}
try {
kerberosMinSecondsBeforeRelogin = 1000L * conf.getLong(
HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN,
HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT);
}
catch(NumberFormatException nfe) {
throw new IllegalArgumentException("Invalid attribute value for " +
HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN + " of " +
conf.get(HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN));
}
// If we haven't set up testing groups, use the configuration to find it
if (!(groups instanceof TestingGroups)) {
groups = Groups.getUserToGroupsMappingService(conf);
}
UserGroupInformation.conf = conf;
if (metrics.getGroupsQuantiles == null) {
int[] intervals = conf.getInts(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS);
if (intervals != null && intervals.length > 0) {
final int length = intervals.length;
MutableQuantiles[] getGroupsQuantiles = new MutableQuantiles[length];
for (int i = 0; i < length; i++) {
getGroupsQuantiles[i] = metrics.registry.newQuantiles(
"getGroups" + intervals[i] + "s",
"Get groups", "ops", "latency", intervals[i]);
}
metrics.getGroupsQuantiles = getGroupsQuantiles;
}
}
}
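// Illustrative configuration (not part of the original source): the relogin
// floor read above is normally set in core-site.xml, e.g.
//   <property>
//     <name>hadoop.kerberos.min.seconds.before.relogin</name>
//     <value>60</value>
//   </property>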
/**
* Set the static configuration for UGI.
* In particular, set the security authentication mechanism and the
* group look up service.
* @param conf the configuration to use
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void setConfiguration(Configuration conf) {
initialize(conf, true);
}
@InterfaceAudience.Private
@VisibleForTesting
public static void reset() {
authenticationMethod = null;
conf = null;
groups = null;
kerberosMinSecondsBeforeRelogin = 0;
setLoginUser(null);
HadoopKerberosName.setRules(null);
}
/**
* Determine if UserGroupInformation is using Kerberos to determine
* user identities or is relying on simple authentication
*
* @return true if UGI is working in a secure environment
 */
public static boolean isSecurityEnabled() {
return !isAuthenticationMethodEnabled(AuthenticationMethod.SIMPLE);
}
@InterfaceAudience.Private
@InterfaceStability.Evolving
private static boolean isAuthenticationMethodEnabled(AuthenticationMethod method) {
ensureInitialized();
return (authenticationMethod == method);
}
/**
* Information about the logged in user.
 */
private static final AtomicReference<UserGroupInformation> loginUserRef =
new AtomicReference<>();
private final Subject subject;
// All non-static fields must be read-only caches that come from the subject.
private final User user;
private static String OS_LOGIN_MODULE_NAME;
private static Class<? extends Principal> OS_PRINCIPAL_CLASS;
private static final boolean windows =
System.getProperty("os.name").startsWith("Windows");
private static final boolean is64Bit =
System.getProperty("os.arch").contains("64") ||
System.getProperty("os.arch").contains("s390x");
private static final boolean aix = System.getProperty("os.name").equals("AIX");
/* Return the OS login module class name */
private static String getOSLoginModuleName() {
if (IBM_JAVA) {
if (windows) {
return is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
: "com.ibm.security.auth.module.NTLoginModule";
} else if (aix) {
return is64Bit ? "com.ibm.security.auth.module.AIX64LoginModule"
: "com.ibm.security.auth.module.AIXLoginModule";
} else {
return "com.ibm.security.auth.module.LinuxLoginModule";
}
} else {
return windows ? "com.sun.security.auth.module.NTLoginModule"
: "com.sun.security.auth.module.UnixLoginModule";
}
}
/* Return the OS principal class */
@SuppressWarnings("unchecked")
private static Class<? extends Principal> getOsPrincipalClass() {
ClassLoader cl = ClassLoader.getSystemClassLoader();
try {
String principalClass = null;
if (IBM_JAVA) {
if (is64Bit) {
principalClass = "com.ibm.security.auth.UsernamePrincipal";
} else {
if (windows) {
principalClass = "com.ibm.security.auth.NTUserPrincipal";
} else if (aix) {
principalClass = "com.ibm.security.auth.AIXPrincipal";
} else {
principalClass = "com.ibm.security.auth.LinuxPrincipal";
}
}
} else {
principalClass = windows ? "com.sun.security.auth.NTUserPrincipal"
: "com.sun.security.auth.UnixPrincipal";
}
return (Class<? extends Principal>) cl.loadClass(principalClass);
} catch (ClassNotFoundException e) {
LOG.error("Unable to find JAAS classes:" + e.getMessage());
}
return null;
}
static {
OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
OS_PRINCIPAL_CLASS = getOsPrincipalClass();
}
private static class RealUser implements Principal {
private final UserGroupInformation realUser;
RealUser(UserGroupInformation realUser) {
this.realUser = realUser;
}
@Override
public String getName() {
return realUser.getUserName();
}
public UserGroupInformation getRealUser() {
return realUser;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
} else if (o == null || getClass() != o.getClass()) {
return false;
} else {
return realUser.equals(((RealUser) o).realUser);
}
}
@Override
public int hashCode() {
return realUser.hashCode();
}
@Override
public String toString() {
return realUser.toString();
}
}
private static HadoopLoginContext
newLoginContext(String appName, Subject subject,
HadoopConfiguration loginConf)
throws LoginException {
// Temporarily switch the thread's ContextClassLoader to match this
// class's classloader, so that we can properly load HadoopLoginModule
// from the JAAS libraries.
Thread t = Thread.currentThread();
ClassLoader oldCCL = t.getContextClassLoader();
t.setContextClassLoader(HadoopLoginModule.class.getClassLoader());
try {
return new HadoopLoginContext(appName, subject, loginConf);
} finally {
t.setContextClassLoader(oldCCL);
}
}
// return the LoginContext only if it's managed by the ugi. externally
// managed login contexts will be ignored.
private HadoopLoginContext getLogin() {
LoginContext login = user.getLogin();
return (login instanceof HadoopLoginContext)
? (HadoopLoginContext)login : null;
}
private void setLogin(LoginContext login) {
user.setLogin(login);
}
/**
* Create a UserGroupInformation for the given subject.
* This does not change the subject or acquire new credentials.
*
* The creator of subject is responsible for renewing credentials.
* @param subject the user's subject
*/
UserGroupInformation(Subject subject) {
this.subject = subject;
// do not access ANY private credentials since they are mutable
// during a relogin. no principal locking necessary since
// relogin/logout does not remove User principal.
this.user = subject.getPrincipals(User.class).iterator().next();
if (user == null || user.getName() == null) {
throw new IllegalStateException("Subject does not contain a valid User");
}
}
/**
* checks if logged in using kerberos
* @return true if the subject logged via keytab or has a Kerberos TGT
 */
public boolean hasKerberosCredentials() {
return user.getAuthenticationMethod() == AuthenticationMethod.KERBEROS;
}
/**
* Return the current user, including any doAs in the current stack.
* @return the current user
* @throws IOException if login fails
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getCurrentUser() throws IOException {
AccessControlContext context = AccessController.getContext();
Subject subject = Subject.getSubject(context);
if (subject == null || subject.getPrincipals(User.class).isEmpty()) {
return getLoginUser();
} else {
return new UserGroupInformation(subject);
}
}
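// Illustrative usage (not part of the original source): callers typically
// obtain the current UGI and run privileged work through doAs, e.g.
//   UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
//   FileSystem fs = ugi.doAs(
//       (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(conf));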
/**
* Find the most appropriate UserGroupInformation to use
*
* @param ticketCachePath The Kerberos ticket cache path, or NULL
* if none is specified
* @param user The user name, or NULL if none is specified.
*
* @return The most appropriate UserGroupInformation
*/
public static UserGroupInformation getBestUGI(
String ticketCachePath, String user) throws IOException {
if (ticketCachePath != null) {
return getUGIFromTicketCache(ticketCachePath, user);
} else if (user == null) {
return getCurrentUser();
} else {
return createRemoteUser(user);
}
}
/**
* Create a UserGroupInformation from a Kerberos ticket cache.
*
* @param user The principal name to load from the ticket
* cache
* @param ticketCache the path to the ticket cache file
*
* @throws IOException if the kerberos login fails
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getUGIFromTicketCache(
String ticketCache, String user) throws IOException {
if (!isAuthenticationMethodEnabled(AuthenticationMethod.KERBEROS)) {
return getBestUGI(null, user);
}
LoginParams params = new LoginParams();
params.put(LoginParam.PRINCIPAL, user);
params.put(LoginParam.CCACHE, ticketCache);
return doSubjectLogin(null, params);
}
/**
* Create a UserGroupInformation from a Subject with Kerberos principal.
*
* @param subject The KerberosPrincipal to use in UGI.
* The creator of subject is responsible for
* renewing credentials.
*
* @throws IOException
* @throws KerberosAuthException if the kerberos login fails
 */
public static UserGroupInformation getUGIFromSubject(Subject subject)
throws IOException {
if (subject == null) {
throw new KerberosAuthException(SUBJECT_MUST_NOT_BE_NULL);
}
if (subject.getPrincipals(KerberosPrincipal.class).isEmpty()) {
throw new KerberosAuthException(SUBJECT_MUST_CONTAIN_PRINCIPAL);
}
// null params indicate external subject login. no login context will
// be attached.
return doSubjectLogin(subject, null);
}
/**
* Get the currently logged in user. If no explicit login has occurred,
* the user will automatically be logged in with either kerberos credentials
* if available, or as the local OS user, based on security settings.
* @return the logged in user
* @throws IOException if login fails
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getLoginUser() throws IOException {
UserGroupInformation loginUser = loginUserRef.get();
// a potential race condition exists only for the initial creation of
// the login user. there's no need to penalize all subsequent calls
// with synchronization overhead so optimistically create a login user
// and discard if we lose the race.
if (loginUser == null) {
UserGroupInformation newLoginUser = createLoginUser(null);
do {
// it's extremely unlikely that the login user will be non-null
// (lost CAS race), but be nulled before the subsequent get, but loop
// for correctness.
if (loginUserRef.compareAndSet(null, newLoginUser)) {
loginUser = newLoginUser;
// only spawn renewal if this login user is the winner.
loginUser.spawnAutoRenewalThreadForUserCreds(false);
} else {
loginUser = loginUserRef.get();
}
} while (loginUser == null);
}
return loginUser;
}
/**
* remove the login method that is followed by a space from the username
* e.g. "jack (auth:SIMPLE)" -> "jack"
*
* @param userName
* @return userName without login method
 */
public static String trimLoginMethod(String userName) {
int spaceIndex = userName.indexOf(' ');
if (spaceIndex >= 0) {
userName = userName.substring(0, spaceIndex);
}
return userName;
}
/**
* Log in a user using the given subject
* @param subject the subject to use when logging in a user, or null to
* create a new subject.
*
* If subject is not null, the creator of subject is responsible for renewing
* credentials.
*
* @throws IOException if login fails
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void loginUserFromSubject(Subject subject) throws IOException {
setLoginUser(createLoginUser(subject));
}
private static
UserGroupInformation createLoginUser(Subject subject) throws IOException {
UserGroupInformation realUser = doSubjectLogin(subject, null);
UserGroupInformation loginUser = null;
try {
// If the HADOOP_PROXY_USER environment variable or property
// is specified, create a proxy user as the logged in user.
String proxyUser = System.getenv(HADOOP_PROXY_USER);
if (proxyUser == null) {
proxyUser = System.getProperty(HADOOP_PROXY_USER);
}
loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, realUser);
String tokenFileLocation = System.getProperty(HADOOP_TOKEN_FILES);
if (tokenFileLocation == null) {
tokenFileLocation = conf.get(HADOOP_TOKEN_FILES);
}
if (tokenFileLocation != null) {
for (String tokenFileName:
StringUtils.getTrimmedStrings(tokenFileLocation)) {
if (tokenFileName.length() > 0) {
File tokenFile = new File(tokenFileName);
if (tokenFile.exists() && tokenFile.isFile()) {
Credentials cred = Credentials.readTokenStorageFile(
tokenFile, conf);
loginUser.addCredentials(cred);
} else {
LOG.info("tokenFile("+tokenFileName+") does not exist");
}
}
}
}
String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
if (fileLocation != null) {
// Load the token storage file and put all of the tokens into the
// user. Don't use the FileSystem API for reading since it has a lock
// cycle (HADOOP-9212).
File source = new File(fileLocation);
LOG.debug("Reading credentials from location set in {}: {}",
HADOOP_TOKEN_FILE_LOCATION,
source.getCanonicalPath());
if (!source.isFile()) {
throw new FileNotFoundException("Source file "
+ source.getCanonicalPath() + " from "
+ HADOOP_TOKEN_FILE_LOCATION
+ " not found");
}
Credentials cred = Credentials.readTokenStorageFile(
source, conf);
LOG.debug("Loaded {} tokens", cred.numberOfTokens());
loginUser.addCredentials(cred);
}
} catch (IOException ioe) {
LOG.debug("failure to load login credentials", ioe);
throw ioe;
}
if (LOG.isDebugEnabled()) {
LOG.debug("UGI loginUser:"+loginUser);
}
return loginUser;
}
@InterfaceAudience.Private
@InterfaceStability.Unstable
@VisibleForTesting
public static void setLoginUser(UserGroupInformation ugi) {
// if this is to become stable, should probably logout the currently
// logged in ugi if it's different
loginUserRef.set(ugi);
}
private String getKeytab() {
HadoopLoginContext login = getLogin();
return (login != null)
? login.getConfiguration().getParameters().get(LoginParam.KEYTAB)
: null;
}
/**
* Is the ugi managed by the UGI or an external subject?
* @return true if managed by UGI.
 */
private boolean isHadoopLogin() {
// checks if the private hadoop login context is managing the ugi.
return getLogin() != null;
}
/**
* Is this user logged in from a keytab file managed by the UGI?
* @return true if the credentials are from a keytab file.
 */
public boolean isFromKeytab() {
// can't simply check if keytab is present since a relogin failure will
// have removed the keytab from priv creds. instead, check login params.
return hasKerberosCredentials() && isHadoopLogin() && getKeytab() != null;
}
/**
* Is this user logged in from a ticket (but no keytab) managed by the UGI?
* @return true if the credentials are from a ticket cache.
 */
private boolean isFromTicket() {
return hasKerberosCredentials() && isHadoopLogin() && getKeytab() == null;
}
/**
* Get the Kerberos TGT
* @return the user's TGT or null if none was found
 */
private KerberosTicket getTGT() {
Set<KerberosTicket> tickets = subject
.getPrivateCredentials(KerberosTicket.class);
for (KerberosTicket ticket : tickets) {
if (SecurityUtil.isOriginalTGT(ticket)) {
return ticket;
}
}
return null;
}
private long getRefreshTime(KerberosTicket tgt) {
long start = tgt.getStartTime().getTime();
long end = tgt.getEndTime().getTime();
return start + (long) ((end - start) * TICKET_RENEW_WINDOW);
}
private boolean shouldRelogin() {
return hasKerberosCredentials() && isHadoopLogin();
}
@InterfaceAudience.Private
@InterfaceStability.Unstable
@VisibleForTesting
/**
* Spawn a thread to do periodic renewals of kerberos credentials from
* a ticket cache. NEVER directly call this method.
* @param force - used by tests to forcibly spawn thread
 */
void spawnAutoRenewalThreadForUserCreds(boolean force) {
if (!force && (!shouldRelogin() || isFromKeytab())) {
return;
}
//spawn thread only if we have kerb credentials
Thread t = new Thread(new Runnable() {
@Override
public void run() {
String cmd = conf.get("hadoop.kerberos.kinit.command", "kinit");
KerberosTicket tgt = getTGT();
if (tgt == null) {
return;
}
long nextRefresh = getRefreshTime(tgt);
RetryPolicy rp = null;
while (true) {
try {
long now = Time.now();
if (LOG.isDebugEnabled()) {
LOG.debug("Current time is " + now);
LOG.debug("Next refresh is " + nextRefresh);
}
if (now < nextRefresh) {
Thread.sleep(nextRefresh - now);
}
Shell.execCommand(cmd, "-R");
if (LOG.isDebugEnabled()) {
LOG.debug("renewed ticket");
}
reloginFromTicketCache();
tgt = getTGT();
if (tgt == null) {
LOG.warn("No TGT after renewal. Aborting renew thread for " +
getUserName());
return;
}
nextRefresh = Math.max(getRefreshTime(tgt),
now + kerberosMinSecondsBeforeRelogin);
metrics.renewalFailures.set(0);
rp = null;
} catch (InterruptedException ie) {
LOG.warn("Terminating renewal thread");
return;
} catch (IOException ie) {
metrics.renewalFailuresTotal.incr();
final long tgtEndTime = tgt.getEndTime().getTime();
LOG.warn("Exception encountered while running the renewal "
+ "command for {}. (TGT end time:{}, renewalFailures: {},"
+ "renewalFailuresTotal: {})", getUserName(), tgtEndTime,
metrics.renewalFailures, metrics.renewalFailuresTotal, ie);
final long now = Time.now();
if (rp == null) {
// Use a dummy maxRetries to create the policy. The policy will
// only be used to get next retry time with exponential back-off.
// The final retry time will be later limited within the
// tgt endTime in getNextTgtRenewalTime.
rp = RetryPolicies.exponentialBackoffRetry(Long.SIZE - 2,
kerberosMinSecondsBeforeRelogin, TimeUnit.MILLISECONDS);
}
try {
nextRefresh = getNextTgtRenewalTime(tgtEndTime, now, rp);
} catch (Exception e) {
LOG.error("Exception when calculating next tgt renewal time", e);
return;
}
metrics.renewalFailures.incr();
// retry until close enough to tgt endTime.
if (now > nextRefresh) {
LOG.error("TGT is expired. Aborting renew thread for {}.",
getUserName());
return;
}
}
}
}
});
t.setDaemon(true);
t.setName("TGT Renewer for " + getUserName());
t.start();
}
/**
* Get time for next login retry. This will allow the thread to retry with
* exponential back-off, until tgt endtime.
* Last retry is {@link #kerberosMinSecondsBeforeRelogin} before endtime.
*
* @param tgtEndTime EndTime of the tgt.
* @param now Current time.
* @param rp The retry policy.
* @return Time for next login retry.
*/
@VisibleForTesting
static long getNextTgtRenewalTime(final long tgtEndTime, final long now,
final RetryPolicy rp) throws Exception {
final long lastRetryTime = tgtEndTime - kerberosMinSecondsBeforeRelogin;
final RetryPolicy.RetryAction ra = rp.shouldRetry(null,
metrics.renewalFailures.value(), 0, false);
return Math.min(lastRetryTime, now + ra.delayMillis);
}
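// Worked example (illustrative, not part of the original source): with the
// default kerberosMinSecondsBeforeRelogin of 60s (60,000 ms) and an
// exponential-back-off delay of 240,000 ms, the next retry time is
// min(tgtEndTime - 60,000, now + 240,000), so retries never run later than
// one relogin window before the TGT expires.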
/**
* Log a user in from a keytab file. Loads a user identity from a keytab
* file and logs them in. They become the currently logged-in user.
* @param user the principal name to load from the keytab
* @param path the path to the keytab file
* @throws IOException
* @throws KerberosAuthException if it's a kerberos login exception.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void loginUserFromKeytab(String user,
String path
) throws IOException {
if (!isSecurityEnabled())
return;
setLoginUser(loginUserFromKeytabAndReturnUGI(user, path));
LOG.info("Login successful for user " + user
+ " using keytab file " + path);
}
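// Illustrative usage (not part of the original source; the principal and
// keytab path are placeholders):
//   UserGroupInformation.loginUserFromKeytab(
//       "svc/host@EXAMPLE.COM", "/etc/security/keytabs/svc.keytab");
//   UserGroupInformation ugi = UserGroupInformation.getLoginUser();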
/**
* Log the current user out who previously logged in using keytab.
* This method assumes that the user logged in by calling
* {@link #loginUserFromKeytab(String, String)}.
*
* @throws IOException
* @throws KerberosAuthException if a failure occurred in logout,
* or if the user did not log in by invoking loginUserFromKeyTab() before.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public void logoutUserFromKeytab() throws IOException {
if (!hasKerberosCredentials()) {
return;
}
HadoopLoginContext login = getLogin();
String keytabFile = getKeytab();
if (login == null || keytabFile == null) {
throw new KerberosAuthException(MUST_FIRST_LOGIN_FROM_KEYTAB);
}
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Initiating logout for " + getUserName());
}
// hadoop login context internally locks credentials.
login.logout();
} catch (LoginException le) {
KerberosAuthException kae = new KerberosAuthException(LOGOUT_FAILURE, le);
kae.setUser(user.toString());
kae.setKeytabFile(keytabFile);
throw kae;
}
LOG.info("Logout successful for user " + getUserName()
+ " using keytab file " + keytabFile);
}
/**
* Re-login a user from keytab if TGT is expired or is close to expiry.
*
* @throws IOException
* @throws KerberosAuthException if it's a kerberos login exception.
 */
public void checkTGTAndReloginFromKeytab() throws IOException {
reloginFromKeytab(true);
}
// if the first kerberos ticket is not TGT, then remove and destroy it since
// the kerberos library of jdk always use the first kerberos ticket as TGT.
// See HADOOP-13433 for more details.
@VisibleForTesting
void fixKerberosTicketOrder() {
Set