
/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java

https://github.com/pkalmegh/hive
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hive.service.auth;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;

import javax.security.auth.login.LoginException;
import javax.security.sasl.Sasl;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSSLTransportFactory;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HiveAuthFactory {
  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
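
  /** Supported HiveServer2 authentication modes (the hive.server2.authentication setting). */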
  public static enum AuthTypes {
    NOSASL("NOSASL"),
    NONE("NONE"),
    LDAP("LDAP"),
    KERBEROS("KERBEROS"),
    CUSTOM("CUSTOM"),
    PAM("PAM");

    private String authType;

    AuthTypes(String authType) {
      this.authType = authType;
    }

    public String getAuthName() {
      return authType;
    }
  };
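
  // Kerberos SASL server bridge (non-null only when KERBEROS auth is configured on a secure
  // cluster); the remaining fields cache the configured transport mode and authentication type.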
  private HadoopThriftAuthBridge.Server saslServer = null;
  private String authTypeStr;
  private String transportMode;
  private final HiveConf conf;

  public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
  public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
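
  /**
   * Reads the transport mode and authentication type from the configuration; defaults to NOSASL
   * in http mode and NONE in binary mode. For KERBEROS on a secure Hadoop installation it also
   * creates the SASL server from the HiveServer2 keytab/principal and starts the delegation
   * token secret manager.
   */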
  public HiveAuthFactory(HiveConf conf) throws TTransportException {
    this.conf = conf;
    transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
    authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
    // In http mode we use NOSASL as the default auth type
    if (transportMode.equalsIgnoreCase("http")) {
      if (authTypeStr == null) {
        authTypeStr = AuthTypes.NOSASL.getAuthName();
      }
    } else {
      if (authTypeStr == null) {
        authTypeStr = AuthTypes.NONE.getAuthName();
      }
      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())
          && ShimLoader.getHadoopShims().isSecureShimImpl()) {
        saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(
            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
        // start delegation token manager
        try {
          saslServer.startDelegationTokenSecretManager(conf, null);
        } catch (IOException e) {
          throw new TTransportException("Failed to start token manager", e);
        }
      }
    }
  }
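
  /**
   * Builds the SASL properties (QOP and server-auth) for the Thrift transport, warning when
   * hadoop.rpc.protection is configured stronger than the HiveServer2 thrift SASL QOP.
   */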
  public Map<String, String> getSaslProperties() {
    Map<String, String> saslProps = new HashMap<String, String>();
    SaslQOP saslQOP =
        SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
    // hadoop.rpc.protection being set to a higher level than hive.server2.thrift.sasl.qop
    // does not make sense in most situations. Log a warning message in such cases.
    Map<String, String> hadoopSaslProps = ShimLoader.getHadoopThriftAuthBridge()
        .getHadoopSaslProperties(conf);
    SaslQOP hadoopSaslQOP = SaslQOP.fromString(hadoopSaslProps.get(Sasl.QOP));
    if (hadoopSaslQOP.ordinal() > saslQOP.ordinal()) {
      LOG.warn(MessageFormat.format("\"hadoop.rpc.protection\" is set to a higher security level " +
          "{0} than {1}, which is set to {2}", hadoopSaslQOP.toString(),
          ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname, saslQOP.toString()));
    }
    saslProps.put(Sasl.QOP, saslQOP.toString());
    saslProps.put(Sasl.SERVER_AUTH, "true");
    return saslProps;
  }
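
  /**
   * Returns the Thrift transport factory matching the configured authentication type: a Kerberos
   * SASL factory for KERBEROS, a plain SASL factory for NONE/LDAP/PAM/CUSTOM, and a raw
   * TTransportFactory for NOSASL.
   */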
  public TTransportFactory getAuthTransFactory() throws LoginException {
    TTransportFactory transportFactory;
    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
      try {
        transportFactory = saslServer.createTransportFactory(getSaslProperties());
      } catch (TTransportException e) {
        throw new LoginException(e.getMessage());
      }
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
      transportFactory = new TTransportFactory();
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else {
      throw new LoginException("Unsupported authentication type " + authTypeStr);
    }
    return transportFactory;
  }
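
  /**
   * Returns the Thrift processor factory for the given service: the HTTP auth processor factory
   * in http transport mode, otherwise a Kerberos or plain SASL processor factory depending on the
   * configured authentication type.
   */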
  public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
    if (transportMode.equalsIgnoreCase("http")) {
      return HttpAuthUtils.getAuthProcFactory(service);
    } else {
      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
        return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
      } else {
        return PlainSaslHelper.getPlainProcessorFactory(service);
      }
    }
  }
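
  // The remote user and client IP address are only tracked by the Kerberos SASL server;
  // both accessors return null when SASL is not in use.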
  public String getRemoteUser() {
    if (saslServer != null) {
      return saslServer.getRemoteUser();
    } else {
      return null;
    }
  }

  public String getIpAddress() {
    if (saslServer != null && saslServer.getRemoteAddress() != null) {
      return saslServer.getRemoteAddress().getHostAddress();
    } else {
      return null;
    }
  }

  // Perform kerberos login using the hadoop shim API if the configuration is available
  public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
    if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
      ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
    } else {
      throw new IOException("HiveServer2 kerberos principal or keytab is not correctly configured");
    }
  }

  // Perform spnego login using the hadoop shim API if the configuration is available
  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
      throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
    if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
      return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(principal, keyTabFile);
    } else {
      throw new IOException("HiveServer2 SPNego principal or keytab is not correctly configured");
    }
  }
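
  // Client-side helpers for creating plain and SSL Thrift socket transports to HiveServer2.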
  public static TTransport getSocketTransport(String host, int port, int loginTimeout)
      throws TTransportException {
    return new TSocket(host, port, loginTimeout);
  }

  public static TTransport getSSLSocket(String host, int port, int loginTimeout)
      throws TTransportException {
    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
  }

  public static TTransport getSSLSocket(String host, int port, int loginTimeout,
      String trustStorePath, String trustStorePassWord) throws TTransportException {
    TSSLTransportFactory.TSSLTransportParameters params =
        new TSSLTransportFactory.TSSLTransportParameters();
    params.setTrustStore(trustStorePath, trustStorePassWord);
    params.requireClientAuth(true);
    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
  }
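
  // Server-side helpers: bind a plain or SSL Thrift server socket on the configured host and port.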
  public static TServerSocket getServerSocket(String hiveHost, int portNum)
      throws TTransportException {
    InetSocketAddress serverAddress = null;
    if (hiveHost != null && !hiveHost.isEmpty()) {
      serverAddress = new InetSocketAddress(hiveHost, portNum);
    } else {
      serverAddress = new InetSocketAddress(portNum);
    }
    return new TServerSocket(serverAddress);
  }
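
  // The SSL server socket binds to the local host when no explicit hiveHost is configured.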
  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
      String keyStorePassWord) throws TTransportException, UnknownHostException {
    TSSLTransportFactory.TSSLTransportParameters params =
        new TSSLTransportFactory.TSSLTransportParameters();
    params.setKeyStore(keyStorePath, keyStorePassWord);
    InetAddress serverAddress;
    if (hiveHost == null || hiveHost.isEmpty()) {
      serverAddress = InetAddress.getLocalHost();
    } else {
      serverAddress = InetAddress.getByName(hiveHost);
    }
    return TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress, params);
  }

  // retrieve delegation token for the given user
  public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication");
    }
    try {
      String tokenStr = saslServer.getDelegationTokenWithService(owner, renewer, HS2_CLIENT_TOKEN);
      if (tokenStr == null || tokenStr.isEmpty()) {
        throw new HiveSQLException("Received an empty delegation token for user " + owner);
      }
      return tokenStr;
    } catch (IOException e) {
      throw new HiveSQLException("Error retrieving delegation token for user " + owner, e);
    } catch (InterruptedException e) {
      throw new HiveSQLException("Delegation token retrieval interrupted", e);
    }
  }

  // cancel given delegation token
  public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication");
    }
    try {
      saslServer.cancelDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException("Error canceling delegation token " + delegationToken, e);
    }
  }

  public void renewDelegationToken(String delegationToken) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication");
    }
    try {
      saslServer.renewDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException("Error renewing delegation token " + delegationToken, e);
    }
  }

  public String getUserFromToken(String delegationToken) throws HiveSQLException {
    if (saslServer == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication");
    }
    try {
      return saslServer.getUserFromToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException("Error extracting user from delegation token " + delegationToken, e);
    }
  }
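
  /**
   * Verifies that realUser is allowed to impersonate proxyUser from the given client address.
   * The check is delegated to Hadoop's proxy-user authorization via the shim layer and is
   * skipped when the proxy user equals the real user.
   */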
  public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
      HiveConf hiveConf) throws HiveSQLException {
    UserGroupInformation sessionUgi;
    try {
      if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
        sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser);
      } else {
        sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(realUser, null);
      }
      if (!proxyUser.equalsIgnoreCase(realUser)) {
        ShimLoader.getHadoopShims().authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
      }
    } catch (IOException e) {
      throw new HiveSQLException("Failed to validate proxy privilege of " + realUser +
          " for " + proxyUser, e);
    }
  }
}