PageRenderTime 50ms CodeModel.GetById 21ms RepoModel.GetById 0ms app.codeStats 0ms

/tags/release-0.0.0-rc0/src/test/org/apache/hcatalog/cli/TestPermsGrp.java

#
Java | 239 lines | 169 code | 43 blank | 27 comment | 2 complexity | 2769d197eeeb4c2dfa8009a76b3a8423 MD5 | raw file
Possible License(s): Apache-2.0, BSD-3-Clause, JSON, CPL-1.0
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
  18. package org.apache.hcatalog.cli;
  19. import java.io.FileNotFoundException;
  20. import java.util.ArrayList;
  21. import junit.framework.TestCase;
  22. import org.apache.hadoop.fs.Path;
  23. import org.apache.hadoop.fs.permission.FsPermission;
  24. import org.apache.hadoop.hive.conf.HiveConf;
  25. import org.apache.hadoop.hive.metastore.HiveMetaStore;
  26. import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
  27. import org.apache.hadoop.hive.metastore.MetaStoreUtils;
  28. import org.apache.hadoop.hive.metastore.Warehouse;
  29. import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
  30. import org.apache.hadoop.hive.metastore.api.FieldSchema;
  31. import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
  32. import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
  33. import org.apache.hadoop.hive.metastore.api.MetaException;
  34. import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
  35. import org.apache.hadoop.hive.metastore.api.SerDeInfo;
  36. import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
  37. import org.apache.hadoop.hive.metastore.api.Table;
  38. import org.apache.hadoop.hive.metastore.api.Type;
  39. import org.apache.hadoop.hive.serde.Constants;
  40. import org.apache.hadoop.util.StringUtils;
  41. import org.apache.hcatalog.ExitException;
  42. import org.apache.hcatalog.NoExitSecurityManager;
  43. import org.apache.hcatalog.cli.HCatCli;
  44. import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
  45. import org.apache.hcatalog.common.HCatConstants;
  46. import org.apache.thrift.TException;
  47. public class TestPermsGrp extends TestCase {
  48. private boolean isServerRunning = false;
  49. private static final String msPort = "20101";
  50. private HiveConf howlConf;
  51. private Warehouse clientWH;
  52. private Thread t;
  53. private HiveMetaStoreClient msc;
  54. private static class RunMS implements Runnable {
  55. @Override
  56. public void run() {
  57. HiveMetaStore.main(new String[]{msPort});
  58. }
  59. }
  60. @Override
  61. protected void tearDown() throws Exception {
  62. System.setSecurityManager(securityManager);
  63. }
  64. @Override
  65. protected void setUp() throws Exception {
  66. if(isServerRunning) {
  67. return;
  68. }
  69. t = new Thread(new RunMS());
  70. t.start();
  71. Thread.sleep(40000);
  72. isServerRunning = true;
  73. securityManager = System.getSecurityManager();
  74. System.setSecurityManager(new NoExitSecurityManager());
  75. howlConf = new HiveConf(this.getClass());
  76. howlConf.set("hive.metastore.local", "false");
  77. howlConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
  78. howlConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
  79. howlConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
  80. howlConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  81. howlConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  82. howlConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
  83. clientWH = new Warehouse(howlConf);
  84. msc = new HiveMetaStoreClient(howlConf,null);
  85. System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  86. System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  87. }
  88. public void testCustomPerms() throws Exception {
  89. String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
  90. String tblName = "simptbl";
  91. String typeName = "Person";
  92. try {
  93. // Lets first test for default permissions, this is the case when user specified nothing.
  94. Table tbl = getTable(dbName,tblName,typeName);
  95. msc.createTable(tbl);
  96. Path dfsPath = clientWH.getDefaultTablePath(dbName, tblName);
  97. cleanupTbl(dbName, tblName, typeName);
  98. // Next user did specify perms.
  99. try{
  100. HCatCli.main(new String[]{"-e","create table simptbl (name string) stored as RCFILE", "-p","rwx-wx---"});
  101. }
  102. catch(Exception e){
  103. assertTrue(e instanceof ExitException);
  104. assertEquals(((ExitException)e).getStatus(), 0);
  105. }
  106. dfsPath = clientWH.getDefaultTablePath(dbName, tblName);
  107. assertTrue(dfsPath.getFileSystem(howlConf).getFileStatus(dfsPath).getPermission().equals(FsPermission.valueOf("drwx-wx---")));
  108. cleanupTbl(dbName, tblName, typeName);
  109. // User specified perms in invalid format.
  110. howlConf.set(HCatConstants.HCAT_PERMS, "rwx");
  111. // make sure create table fails.
  112. try{
  113. HCatCli.main(new String[]{"-e","create table simptbl (name string) stored as RCFILE", "-p","rwx"});
  114. assert false;
  115. }catch(Exception me){
  116. assertTrue(me instanceof ExitException);
  117. }
  118. // No physical dir gets created.
  119. dfsPath = clientWH.getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME,tblName);
  120. try{
  121. dfsPath.getFileSystem(howlConf).getFileStatus(dfsPath);
  122. assert false;
  123. } catch(Exception fnfe){
  124. assertTrue(fnfe instanceof FileNotFoundException);
  125. }
  126. // And no metadata gets created.
  127. try{
  128. msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
  129. assert false;
  130. }catch (Exception e){
  131. assertTrue(e instanceof NoSuchObjectException);
  132. assertEquals("default.simptbl table not found", e.getMessage());
  133. }
  134. // test for invalid group name
  135. howlConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
  136. howlConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
  137. try{
  138. // create table must fail.
  139. HCatCli.main(new String[]{"-e","create table simptbl (name string) stored as RCFILE", "-p","rw-rw-rw-","-g","THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
  140. assert false;
  141. }catch (Exception me){
  142. assertTrue(me instanceof SecurityException);
  143. }
  144. try{
  145. // no metadata should get created.
  146. msc.getTable(dbName, tblName);
  147. assert false;
  148. }catch (Exception e){
  149. assertTrue(e instanceof NoSuchObjectException);
  150. assertEquals("default.simptbl table not found", e.getMessage());
  151. }
  152. try{
  153. // neither dir should get created.
  154. dfsPath.getFileSystem(howlConf).getFileStatus(dfsPath);
  155. assert false;
  156. } catch(Exception e){
  157. assertTrue(e instanceof FileNotFoundException);
  158. }
  159. } catch (Exception e) {
  160. System.err.println(StringUtils.stringifyException(e));
  161. System.err.println("testCustomPerms failed.");
  162. throw e;
  163. }
  164. }
  165. private void silentDropDatabase(String dbName) throws MetaException, TException {
  166. try {
  167. for (String tableName : msc.getTables(dbName, "*")) {
  168. msc.dropTable(dbName, tableName);
  169. }
  170. } catch (NoSuchObjectException e) {
  171. }
  172. }
  173. private void cleanupTbl(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, InvalidOperationException{
  174. msc.dropTable(dbName, tblName);
  175. msc.dropType(typeName);
  176. }
  177. private Table getTable(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, AlreadyExistsException, InvalidObjectException{
  178. msc.dropTable(dbName, tblName);
  179. silentDropDatabase(dbName);
  180. msc.dropType(typeName);
  181. Type typ1 = new Type();
  182. typ1.setName(typeName);
  183. typ1.setFields(new ArrayList<FieldSchema>(1));
  184. typ1.getFields().add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
  185. msc.createType(typ1);
  186. Table tbl = new Table();
  187. tbl.setDbName(dbName);
  188. tbl.setTableName(tblName);
  189. StorageDescriptor sd = new StorageDescriptor();
  190. tbl.setSd(sd);
  191. sd.setCols(typ1.getFields());
  192. sd.setSerdeInfo(new SerDeInfo());
  193. return tbl;
  194. }
  195. private SecurityManager securityManager;
  196. }