/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hive.service.auth;

import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

import javax.net.ssl.SSLServerSocket;
import javax.security.auth.login.LoginException;
import javax.security.sasl.Sasl;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.DBTokenStore;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
import org.apache.hadoop.hive.thrift.HiveDelegationTokenManager;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSSLTransportFactory;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
 * given configuration as well as helps with authenticating requests.
 */
public class HiveAuthFactory {
  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);

  public enum AuthTypes {
    NOSASL("NOSASL"),
    NONE("NONE"),
    LDAP("LDAP"),
    KERBEROS("KERBEROS"),
    CUSTOM("CUSTOM"),
    PAM("PAM");

    private final String authType;

    AuthTypes(String authType) {
      this.authType = authType;
    }

    public String getAuthName() {
      return authType;
    }
  }

  private HadoopThriftAuthBridge.Server saslServer;
  private String authTypeStr;
  private final String transportMode;
  private final HiveConf conf;
  private HiveDelegationTokenManager delegationTokenManager = null;

  public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
  public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";

  private static Field keytabFile = null;
  private static Method getKeytab = null;
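  // Hadoop has exposed UserGroupInformation's keytab differently across releases:
  // older versions keep it in a private static "keytabFile" field, newer ones in a
  // private getKeytab() method. Probe for both reflectively so needUgiLogin() can
  // compare the configured keytab with the one already logged in, whatever the
  // Hadoop version on the classpath.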
  static {
    Class<?> clz = UserGroupInformation.class;
    try {
      keytabFile = clz.getDeclaredField("keytabFile");
      keytabFile.setAccessible(true);
    } catch (NoSuchFieldException nfe) {
      LOG.debug("Cannot find private field \"keytabFile\" in class: " +
        UserGroupInformation.class.getCanonicalName(), nfe);
      keytabFile = null;
    }

    try {
      getKeytab = clz.getDeclaredMethod("getKeytab");
      getKeytab.setAccessible(true);
    } catch (NoSuchMethodException nme) {
      LOG.debug("Cannot find private method \"getKeytab\" in class: " +
        UserGroupInformation.class.getCanonicalName(), nme);
      getKeytab = null;
    }
  }

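  /**
   * Builds the factory for the configured transport and authentication modes. For Kerberos,
   * this also creates the SASL server bridge (reusing an existing UGI login when possible)
   * and starts the delegation token manager.
   */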
  public HiveAuthFactory(HiveConf conf) throws TTransportException, IOException {
    this.conf = conf;
    transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
    authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);

    // In http mode we use NOSASL as the default auth type
    if ("http".equalsIgnoreCase(transportMode)) {
      if (authTypeStr == null) {
        authTypeStr = AuthTypes.NOSASL.getAuthName();
      }
    } else {
      if (authTypeStr == null) {
        authTypeStr = AuthTypes.NONE.getAuthName();
      }
      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
        String principal = conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
        String keytab = conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
        if (needUgiLogin(UserGroupInformation.getCurrentUser(),
          SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keytab)) {
          saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(principal, keytab);
        } else {
          // Using the default constructor to avoid unnecessary UGI login.
          saslServer = new HadoopThriftAuthBridge.Server();
        }

        // start delegation token manager
        delegationTokenManager = new HiveDelegationTokenManager();
        try {
          // rawStore is only necessary for DBTokenStore
          Object rawStore = null;
          String tokenStoreClass = conf.getVar(
              HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS);

          if (tokenStoreClass.equals(DBTokenStore.class.getName())) {
            HMSHandler baseHandler = new HiveMetaStore.HMSHandler(
                "new db based metaserver", conf, true);
            rawStore = baseHandler.getMS();
          }

          delegationTokenManager.startDelegationTokenSecretManager(
              conf, rawStore, ServerMode.HIVESERVER2);
          saslServer.setSecretManager(delegationTokenManager.getSecretManager());
        } catch (MetaException | IOException e) {
          throw new TTransportException("Failed to start token manager", e);
        }
      }
    }
  }

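  /**
   * Returns the SASL properties (QOP and server-auth flag) derived from
   * hive.server2.thrift.sasl.qop, for use when creating SASL transports.
   */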
  public Map<String, String> getSaslProperties() {
    Map<String, String> saslProps = new HashMap<String, String>();
    SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
    saslProps.put(Sasl.QOP, saslQOP.toString());
    saslProps.put(Sasl.SERVER_AUTH, "true");
    return saslProps;
  }

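  /**
   * Returns the Thrift transport factory matching the configured authentication type:
   * a Kerberos SASL factory, a plain SASL factory (NONE, LDAP, PAM, CUSTOM), or a raw
   * transport factory for NOSASL.
   */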
  public TTransportFactory getAuthTransFactory() throws LoginException {
    TTransportFactory transportFactory;
    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
      try {
        transportFactory = saslServer.createTransportFactory(getSaslProperties());
      } catch (TTransportException e) {
        throw new LoginException(e.getMessage());
      }
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
      transportFactory = new TTransportFactory();
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else {
      throw new LoginException("Unsupported authentication type " + authTypeStr);
    }
    return transportFactory;
  }

  /**
   * Returns the Thrift processor factory for HiveServer2 running in binary mode.
   * @param service the ThriftCLIService the processors will dispatch to
   * @return a Kerberos-aware processor factory when Kerberos authentication is configured,
   *         otherwise a plain SASL processor factory
   * @throws LoginException if the processor factory cannot be created
   */
  public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
      return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
    } else {
      return PlainSaslHelper.getPlainProcessorFactory(service);
    }
  }

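  // A minimal usage sketch (illustrative; the surrounding server-setup names are
  // assumptions, not part of this class). A binary-mode HiveServer2 Thrift service
  // typically wires the factory in like so:
  //
  //   HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
  //   TTransportFactory transportFactory = authFactory.getAuthTransFactory();
  //   TProcessorFactory processorFactory = authFactory.getAuthProcFactory(thriftCLIService);
  //
  // Both factories are then handed to the Thrift server (e.g. a TThreadPoolServer).
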
  public String getRemoteUser() {
    return saslServer == null ? null : saslServer.getRemoteUser();
  }

  public String getIpAddress() {
    if (saslServer == null || saslServer.getRemoteAddress() == null) {
      return null;
    } else {
      return saslServer.getRemoteAddress().getHostAddress();
    }
  }

  // Perform Kerberos login via Hadoop's UserGroupInformation; fails if the principal or keytab is not configured
  public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
    } else {
      UserGroupInformation.loginUserFromKeytab(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
    }
  }

  // Perform SPNEGO login via Hadoop's UserGroupInformation; fails if the principal or keytab is not configured
  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
    throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
    } else {
      return UserGroupInformation.loginUserFromKeytabAndReturnUGI(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
    }
  }

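  /** Creates a plain, unencrypted client socket transport. */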
  public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
    return new TSocket(host, port, loginTimeout);
  }

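  /**
   * Creates an SSL client socket relying on the JVM's default SSL socket factory
   * (trust material taken from the standard javax.net.ssl settings).
   */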
  public static TTransport getSSLSocket(String host, int port, int loginTimeout)
    throws TTransportException {
    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
  }

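  /**
   * Creates an SSL client socket that validates the server certificate against the given
   * trust store.
   */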
  public static TTransport getSSLSocket(String host, int port, int loginTimeout,
    String trustStorePath, String trustStorePassWord) throws TTransportException {
    TSSLTransportFactory.TSSLTransportParameters params =
      new TSSLTransportFactory.TSSLTransportParameters();
    params.setTrustStore(trustStorePath, trustStorePassWord);
    params.requireClientAuth(true);
    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
  }

  public static TServerSocket getServerSocket(String hiveHost, int portNum)
    throws TTransportException {
    InetSocketAddress serverAddress;
    if (hiveHost == null || hiveHost.isEmpty()) {
      // Wildcard bind
      serverAddress = new InetSocketAddress(portNum);
    } else {
      serverAddress = new InetSocketAddress(hiveHost, portNum);
    }
    return new TServerSocket(serverAddress);
  }

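  /**
   * Binds an SSL server socket, then drops any protocol version named in
   * sslVersionBlacklist (compared case-insensitively) from the socket's enabled protocols.
   */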
  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
      String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException,
      UnknownHostException {
    TSSLTransportFactory.TSSLTransportParameters params =
        new TSSLTransportFactory.TSSLTransportParameters();
    params.setKeyStore(keyStorePath, keyStorePassWord);
    InetSocketAddress serverAddress;
    if (hiveHost == null || hiveHost.isEmpty()) {
      // Wildcard bind
      serverAddress = new InetSocketAddress(portNum);
    } else {
      serverAddress = new InetSocketAddress(hiveHost, portNum);
    }
    TServerSocket thriftServerSocket =
        TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params);
    if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
      List<String> sslVersionBlacklistLocal = new ArrayList<String>();
      for (String sslVersion : sslVersionBlacklist) {
        sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase(Locale.ROOT));
      }
      SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
      List<String> enabledProtocols = new ArrayList<String>();
      for (String protocol : sslServerSocket.getEnabledProtocols()) {
        if (sslVersionBlacklistLocal.contains(protocol.toLowerCase(Locale.ROOT))) {
          LOG.debug("Disabling SSL Protocol: " + protocol);
        } else {
          enabledProtocols.add(protocol);
        }
      }
      sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
      LOG.info("SSL Server Socket Enabled Protocols: "
          + Arrays.toString(sslServerSocket.getEnabledProtocols()));
    }
    return thriftServerSocket;
  }

  // Retrieve a delegation token for the given user
  public String getDelegationToken(String owner, String renewer, String remoteAddr)
      throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }

    try {
      String tokenStr = delegationTokenManager.getDelegationTokenWithService(owner, renewer,
          HS2_CLIENT_TOKEN, remoteAddr);
      if (tokenStr == null || tokenStr.isEmpty()) {
        throw new HiveSQLException(
            "Received an empty delegation token for user " + owner, "08S01");
      }
      return tokenStr;
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error retrieving delegation token for user " + owner, "08S01", e);
    } catch (InterruptedException e) {
      throw new HiveSQLException("Delegation token retrieval interrupted", "08S01", e);
    }
  }

  // Cancel the given delegation token
  public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      delegationTokenManager.cancelDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error canceling delegation token " + delegationToken, "08S01", e);
    }
  }

  // Renew the given delegation token
  public void renewDelegationToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      delegationTokenManager.renewDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error renewing delegation token " + delegationToken, "08S01", e);
    }
  }

  // Verify the given delegation token
  public String verifyDelegationToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      return delegationTokenManager.verifyDelegationToken(delegationToken);
    } catch (IOException e) {
      String msg = "Error verifying delegation token " + delegationToken;
      LOG.error(msg, e);
      throw new HiveSQLException(msg, "08S01", e);
    }
  }

  // Extract the owning user from the given delegation token
  public String getUserFromToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      return delegationTokenManager.getUserFromToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error extracting user from delegation token " + delegationToken, "08S01", e);
    }
  }

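  /**
   * Verifies, via Hadoop's ProxyUsers machinery, that realUser is allowed to impersonate
   * proxyUser from the given IP address; throws HiveSQLException when the privilege check fails.
   */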
  public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
    HiveConf hiveConf) throws HiveSQLException {
    try {
      UserGroupInformation sessionUgi;
      if (UserGroupInformation.isSecurityEnabled()) {
        KerberosNameShim kerbName = ShimLoader.getHadoopShims().getKerberosNameShim(realUser);
        sessionUgi = UserGroupInformation.createProxyUser(
            kerbName.getServiceName(), UserGroupInformation.getLoginUser());
      } else {
        sessionUgi = UserGroupInformation.createRemoteUser(realUser);
      }
      if (!proxyUser.equalsIgnoreCase(realUser)) {
        ProxyUsers.refreshSuperUserGroupsConfiguration(hiveConf);
        ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi),
            ipAddress, hiveConf);
      }
    } catch (IOException e) {
      throw new HiveSQLException(
        "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, "08S01", e);
    }
  }

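  /**
   * A fresh Kerberos login is needed when there is no current UGI, the UGI has no Kerberos
   * credentials, or its principal or keytab differs from the requested ones.
   */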
  public static boolean needUgiLogin(UserGroupInformation ugi, String principal, String keytab) {
    return null == ugi || !ugi.hasKerberosCredentials() || !ugi.getUserName().equals(principal) ||
      !Objects.equals(keytab, getKeytabFromUgi());
  }

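  // Reads the current keytab path through whichever reflective handle the static
  // initializer managed to resolve; returns null when neither is available.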
  private static String getKeytabFromUgi() {
    synchronized (UserGroupInformation.class) {
      try {
        if (keytabFile != null) {
          return (String) keytabFile.get(null);
        } else if (getKeytab != null) {
          return (String) getKeytab.invoke(UserGroupInformation.getCurrentUser());
        } else {
          return null;
        }
      } catch (Exception e) {
        LOG.debug("Failed to get the keytab file path via reflection", e);
        return null;
      }
    }
  }
}