/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hive.service.auth;

import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

import javax.net.ssl.SSLServerSocket;
import javax.security.auth.login.LoginException;
import javax.security.sasl.Sasl;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.DBTokenStore;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSSLTransportFactory;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
 * given configuration as well as helps with authenticating requests.
 */
public class HiveAuthFactory {
  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);

  public enum AuthTypes {
    NOSASL("NOSASL"),
    NONE("NONE"),
    LDAP("LDAP"),
    KERBEROS("KERBEROS"),
    CUSTOM("CUSTOM"),
    PAM("PAM");

    private final String authType;

    AuthTypes(String authType) {
      this.authType = authType;
    }

    public String getAuthName() {
      return authType;
    }
  }

  private HadoopThriftAuthBridge.Server saslServer;
  private String authTypeStr;
  private final String transportMode;
  private final HiveConf conf;

  public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
  public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";

  private static Field keytabFile = null;
  private static Method getKeytab = null;
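
  // UserGroupInformation has exposed its keytab path through different internals
  // across Hadoop releases (a private static "keytabFile" field in older lines,
  // a private getKeytab() accessor in newer ones), so both are probed
  // reflectively here and getKeytabFromUgi() below uses whichever one resolved.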
  static {
    Class<?> clz = UserGroupInformation.class;
    try {
      keytabFile = clz.getDeclaredField("keytabFile");
      keytabFile.setAccessible(true);
    } catch (NoSuchFieldException nfe) {
      LOG.debug("Cannot find private field \"keytabFile\" in class: " +
        UserGroupInformation.class.getCanonicalName(), nfe);
      keytabFile = null;
    }

    try {
      getKeytab = clz.getDeclaredMethod("getKeytab");
      getKeytab.setAccessible(true);
    } catch (NoSuchMethodException nme) {
      LOG.debug("Cannot find private method \"getKeytab\" in class: " +
        UserGroupInformation.class.getCanonicalName(), nme);
      getKeytab = null;
    }
  }

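  /**
   * Reads the transport mode and authentication type from the configuration.
   * For KERBEROS authentication this also creates the SASL server (performing a
   * UGI login only when the current user does not already hold matching
   * credentials) and starts the delegation token secret manager.
   */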
  public HiveAuthFactory(HiveConf conf) throws TTransportException, IOException {
    this.conf = conf;
    transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
    authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);

    // In http mode we use NOSASL as the default auth type
    if ("http".equalsIgnoreCase(transportMode)) {
      if (authTypeStr == null) {
        authTypeStr = AuthTypes.NOSASL.getAuthName();
      }
    } else {
      if (authTypeStr == null) {
        authTypeStr = AuthTypes.NONE.getAuthName();
      }
      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
        String principal = conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
        String keytab = conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
        if (needUgiLogin(UserGroupInformation.getCurrentUser(),
          SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keytab)) {
          saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(principal, keytab);
        } else {
          // Using the default constructor to avoid unnecessary UGI login.
          saslServer = new HadoopThriftAuthBridge.Server();
        }

        // start delegation token manager
        try {
          // rawStore is only necessary for DBTokenStore
          Object rawStore = null;
          String tokenStoreClass = conf.getVar(HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS);

          if (tokenStoreClass.equals(DBTokenStore.class.getName())) {
            HMSHandler baseHandler = new HiveMetaStore.HMSHandler(
                "new db based metaserver", conf, true);
            rawStore = baseHandler.getMS();
          }

          saslServer.startDelegationTokenSecretManager(conf, rawStore, ServerMode.HIVESERVER2);
        } catch (MetaException | IOException e) {
          throw new TTransportException("Failed to start token manager", e);
        }
      }
    }
  }

  public Map<String, String> getSaslProperties() {
    Map<String, String> saslProps = new HashMap<String, String>();
    SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
    saslProps.put(Sasl.QOP, saslQOP.toString());
    saslProps.put(Sasl.SERVER_AUTH, "true");
    return saslProps;
  }

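  /**
   * Returns a transport factory matching the configured authentication type: a
   * SASL factory from the Kerberos SASL server for KERBEROS, a SASL/PLAIN
   * factory for NONE, LDAP, PAM and CUSTOM, and a raw (unwrapped) transport
   * factory for NOSASL.
   */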
  public TTransportFactory getAuthTransFactory() throws LoginException {
    TTransportFactory transportFactory;
    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
      try {
        transportFactory = saslServer.createTransportFactory(getSaslProperties());
      } catch (TTransportException e) {
        throw new LoginException(e.getMessage());
      }
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
      transportFactory = new TTransportFactory();
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else {
      throw new LoginException("Unsupported authentication type " + authTypeStr);
    }
    return transportFactory;
  }

  /**
   * Returns the Thrift processor factory for HiveServer2 running in binary mode.
   * @param service the Thrift CLI service the processor will dispatch to
   * @return a Kerberos-aware processor factory when KERBEROS authentication is
   *         configured, otherwise a plain SASL processor factory
   * @throws LoginException if a factory cannot be created for the configured
   *         authentication type
   */
  public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
      return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
    } else {
      return PlainSaslHelper.getPlainProcessorFactory(service);
    }
  }

  public String getRemoteUser() {
    return saslServer == null ? null : saslServer.getRemoteUser();
  }

  public String getIpAddress() {
    if (saslServer == null || saslServer.getRemoteAddress() == null) {
      return null;
    } else {
      return saslServer.getRemoteAddress().getHostAddress();
    }
  }

  // Perform Kerberos login using the Hadoop UserGroupInformation API if the configuration is available
  public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
    } else {
      UserGroupInformation.loginUserFromKeytab(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
    }
  }

  // Perform SPNEGO login using the Hadoop UserGroupInformation API if the configuration is available
  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
    throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
    } else {
      return UserGroupInformation.loginUserFromKeytabAndReturnUGI(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
    }
  }

  public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
    return new TSocket(host, port, loginTimeout);
  }

  public static TTransport getSSLSocket(String host, int port, int loginTimeout)
    throws TTransportException {
    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
  }

  public static TTransport getSSLSocket(String host, int port, int loginTimeout,
    String trustStorePath, String trustStorePassWord) throws TTransportException {
    TSSLTransportFactory.TSSLTransportParameters params =
      new TSSLTransportFactory.TSSLTransportParameters();
    params.setTrustStore(trustStorePath, trustStorePassWord);
    params.requireClientAuth(true);
    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
  }

  public static TServerSocket getServerSocket(String hiveHost, int portNum)
    throws TTransportException {
    InetSocketAddress serverAddress;
    if (hiveHost == null || hiveHost.isEmpty()) {
      // Wildcard bind
      serverAddress = new InetSocketAddress(portNum);
    } else {
      serverAddress = new InetSocketAddress(hiveHost, portNum);
    }
    return new TServerSocket(serverAddress);
  }

  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
      String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException,
      UnknownHostException {
    TSSLTransportFactory.TSSLTransportParameters params =
        new TSSLTransportFactory.TSSLTransportParameters();
    params.setKeyStore(keyStorePath, keyStorePassWord);
    InetSocketAddress serverAddress;
    if (hiveHost == null || hiveHost.isEmpty()) {
      // Wildcard bind
      serverAddress = new InetSocketAddress(portNum);
    } else {
      serverAddress = new InetSocketAddress(hiveHost, portNum);
    }
    TServerSocket thriftServerSocket =
        TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params);
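    // Strip the blacklisted SSL/TLS versions (compared case-insensitively) from
    // the socket's enabled protocol list before handing it out.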
    if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
      List<String> sslVersionBlacklistLocal = new ArrayList<String>();
      for (String sslVersion : sslVersionBlacklist) {
        sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase(Locale.ROOT));
      }
      SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
      List<String> enabledProtocols = new ArrayList<String>();
      for (String protocol : sslServerSocket.getEnabledProtocols()) {
        if (sslVersionBlacklistLocal.contains(protocol.toLowerCase(Locale.ROOT))) {
          LOG.debug("Disabling SSL Protocol: " + protocol);
        } else {
          enabledProtocols.add(protocol);
        }
      }
      sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
      LOG.info("SSL Server Socket Enabled Protocols: "
          + Arrays.toString(sslServerSocket.getEnabledProtocols()));
    }
    return thriftServerSocket;
  }

  // Retrieve a delegation token for the given user.
  public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }

    try {
      String tokenStr = saslServer.getDelegationTokenWithService(owner, renewer, HS2_CLIENT_TOKEN);
      if (tokenStr == null || tokenStr.isEmpty()) {
        throw new HiveSQLException(
            "Received an empty delegation token for user " + owner, "08S01");
      }
      return tokenStr;
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error retrieving delegation token for user " + owner, "08S01", e);
    } catch (InterruptedException e) {
      throw new HiveSQLException("Delegation token retrieval interrupted", "08S01", e);
    }
  }

  // Cancel the given delegation token.
  public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      saslServer.cancelDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error canceling delegation token " + delegationToken, "08S01", e);
    }
  }

  // Renew the given delegation token.
  public void renewDelegationToken(String delegationToken) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      saslServer.renewDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error renewing delegation token " + delegationToken, "08S01", e);
    }
  }

  // Look up the owner of the given delegation token.
  public String getUserFromToken(String delegationToken) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token is only supported over Kerberos authentication", "08S01");
    }
    try {
      return saslServer.getUserFromToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error extracting user from delegation token " + delegationToken, "08S01", e);
    }
  }

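  /**
   * Verifies through Hadoop's ProxyUsers machinery that realUser is allowed to
   * impersonate proxyUser from the given IP address, refreshing the superuser
   * group configuration before the check.
   */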
  public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
    HiveConf hiveConf) throws HiveSQLException {
    try {
      UserGroupInformation sessionUgi;
      if (UserGroupInformation.isSecurityEnabled()) {
        KerberosNameShim kerbName = ShimLoader.getHadoopShims().getKerberosNameShim(realUser);
        sessionUgi = UserGroupInformation.createProxyUser(
            kerbName.getServiceName(), UserGroupInformation.getLoginUser());
      } else {
        sessionUgi = UserGroupInformation.createRemoteUser(realUser);
      }
      if (!proxyUser.equalsIgnoreCase(realUser)) {
        ProxyUsers.refreshSuperUserGroupsConfiguration(hiveConf);
        ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi),
            ipAddress, hiveConf);
      }
    } catch (IOException e) {
      throw new HiveSQLException(
        "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, "08S01", e);
    }
  }

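  /**
   * A fresh UGI login is needed unless the current user already holds Kerberos
   * credentials for exactly this principal and keytab.
   */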
  public static boolean needUgiLogin(UserGroupInformation ugi, String principal, String keytab) {
    return null == ugi || !ugi.hasKerberosCredentials() || !ugi.getUserName().equals(principal) ||
      !Objects.equals(keytab, getKeytabFromUgi());
  }

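  // Reads the keytab path out of UGI through whichever reflective handle the
  // static initializer resolved, synchronizing on UserGroupInformation.class,
  // presumably because the reflected field is static in the Hadoop versions
  // that expose it.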
  private static String getKeytabFromUgi() {
    synchronized (UserGroupInformation.class) {
      try {
        if (keytabFile != null) {
          return (String) keytabFile.get(null);
        } else if (getKeytab != null) {
          return (String) getKeytab.invoke(UserGroupInformation.getCurrentUser());
        } else {
          return null;
        }
      } catch (Exception e) {
        LOG.debug("Failed to get keytab file path via reflection", e);
        return null;
      }
    }
  }
}