PageRenderTime 39ms CodeModel.GetById 9ms app.highlight 25ms RepoModel.GetById 1ms app.codeStats 0ms

/tags/release-0.0.0-rc0/hive/external/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java

#
Java | 639 lines | 436 code | 93 blank | 110 comment | 66 complexity | edbcf8a9817ba72114ed3bdcf7b0a73f MD5 | raw file
  1/**
  2 * Licensed to the Apache Software Foundation (ASF) under one
  3 * or more contributor license agreements.  See the NOTICE file
  4 * distributed with this work for additional information
  5 * regarding copyright ownership.  The ASF licenses this file
  6 * to you under the Apache License, Version 2.0 (the
  7 * "License"); you may not use this file except in compliance
  8 * with the License.  You may obtain a copy of the License at
  9 *
 10 *     http://www.apache.org/licenses/LICENSE-2.0
 11 *
 12 * Unless required by applicable law or agreed to in writing, software
 13 * distributed under the License is distributed on an "AS IS" BASIS,
 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 15 * See the License for the specific language governing permissions and
 16 * limitations under the License.
 17 */
 18
 19package org.apache.hadoop.hive.ql.session;
 20
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.URI;
import java.net.URL;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.history.HiveHistory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.util.DosToUnix;
import org.apache.log4j.LogManager;
import org.apache.log4j.PropertyConfigurator;
 51
 52/**
 53 * SessionState encapsulates common data associated with a session.
 54 *
 55 * Also provides support for a thread static session object that can be accessed
 56 * from any point in the code to interact with the user and to retrieve
 57 * configuration information
 58 */
 59public class SessionState {
 60
 61  /**
 62   * current configuration.
 63   */
 64  protected HiveConf conf;
 65
 66  /**
 67   * silent mode.
 68   */
 69  protected boolean isSilent;
 70
 71  /**
 72   * verbose mode
 73   */
 74  protected boolean isVerbose;
 75
 76  /*
 77   * HiveHistory Object
 78   */
 79  protected HiveHistory hiveHist;
 80  /**
 81   * Streams to read/write from.
 82   */
 83  public InputStream in;
 84  public PrintStream out;
 85  public PrintStream err;
 86  /**
 87   * Standard output from any child process(es).
 88   */
 89  public PrintStream childOut;
 90  /**
 91   * Error output from any child process(es).
 92   */
 93  public PrintStream childErr;
 94
 95  /**
 96   * type of the command.
 97   */
 98  private HiveOperation commandType;
 99  
100  private HiveAuthorizationProvider authorizer;
101  
102  private HiveAuthenticationProvider authenticator;
103  
104  private CreateTableAutomaticGrant createTableGrants;
105  
106  /**
107   * Lineage state.
108   */
109  LineageState ls;
110
111  /**
112   * Get the lineage state stored in this session.
113   *
114   * @return LineageState
115   */
116  public LineageState getLineageState() {
117    return ls;
118  }
119
120  public HiveConf getConf() {
121    return conf;
122  }
123
124  public void setConf(HiveConf conf) {
125    this.conf = conf;
126  }
127
128  public boolean getIsSilent() {
129    if(conf != null) {
130      return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
131    } else {
132      return isSilent;
133    }
134  }
135
136  public void setIsSilent(boolean isSilent) {
137    if(conf != null) {
138      conf.setBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT, isSilent);
139    }
140    this.isSilent = isSilent;
141  }
142
143  public boolean getIsVerbose() {
144    return isVerbose;
145  }
146
147  public void setIsVerbose(boolean isVerbose) {
148    this.isVerbose = isVerbose;
149  }
150
151  public SessionState() {
152    this(null);
153  }
154
155  public SessionState(HiveConf conf) {
156    this.conf = conf;
157    isSilent = conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
158    ls = new LineageState();
159  }
160
161  public void setCmd(String cmdString) {
162    conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, cmdString);
163  }
164
165  public String getCmd() {
166    return (conf.getVar(HiveConf.ConfVars.HIVEQUERYSTRING));
167  }
168
169  public String getQueryId() {
170    return (conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
171  }
172
173  public String getSessionId() {
174    return (conf.getVar(HiveConf.ConfVars.HIVESESSIONID));
175  }
176
177  /**
178   * Singleton Session object per thread.
179   *
180   **/
181  private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState>();
182
183  /**
184   * start a new session and set it to current session.
185   * @throws HiveException 
186   */
187  public static SessionState start(HiveConf conf) throws HiveException {
188    SessionState ss = new SessionState(conf);
189    ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
190    ss.hiveHist = new HiveHistory(ss);
191    ss.authenticator = HiveUtils.getAuthenticator(conf);
192    ss.authorizer = HiveUtils.getAuthorizeProviderManager(
193        conf, ss.authenticator);
194    ss.createTableGrants = CreateTableAutomaticGrant.create(conf);
195    tss.set(ss);
196    return (ss);
197  }
198
199  /**
200   * set current session to existing session object if a thread is running
201   * multiple sessions - it must call this method with the new session object
202   * when switching from one session to another.
203   * @throws HiveException 
204   */
205  public static SessionState start(SessionState startSs) {
206
207    tss.set(startSs);
208    if (StringUtils.isEmpty(startSs.getConf().getVar(
209        HiveConf.ConfVars.HIVESESSIONID))) {
210      startSs.getConf()
211          .setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
212    }
213
214    if (startSs.hiveHist == null) {
215      startSs.hiveHist = new HiveHistory(startSs);
216    }
217    
218    try {
219      startSs.authenticator = HiveUtils.getAuthenticator(startSs
220          .getConf());
221      startSs.authorizer = HiveUtils.getAuthorizeProviderManager(startSs
222          .getConf(), startSs.authenticator);
223      startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
224          .getConf());
225    } catch (HiveException e) {
226      throw new RuntimeException(e);
227    }
228    
229    return startSs;
230  }
231
232  /**
233   * get the current session.
234   */
235  public static SessionState get() {
236    return tss.get();
237  }
238
239  /**
240   * get hiveHitsory object which does structured logging.
241   *
242   * @return The hive history object
243   */
244  public HiveHistory getHiveHistory() {
245    return hiveHist;
246  }
247
248  private static String makeSessionId() {
249    GregorianCalendar gc = new GregorianCalendar();
250    String userid = System.getProperty("user.name");
251
252    return userid
253        + "_"
254        + String.format("%1$4d%2$02d%3$02d%4$02d%5$02d", gc.get(Calendar.YEAR),
255        gc.get(Calendar.MONTH) + 1, gc.get(Calendar.DAY_OF_MONTH), gc
256        .get(Calendar.HOUR_OF_DAY), gc.get(Calendar.MINUTE));
257  }
258
259  public static final String HIVE_L4J = "hive-log4j.properties";
260  public static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties";
261
262  public static void initHiveLog4j() {
263    // allow hive log4j to override any normal initialized one
264    URL hive_l4j = SessionState.class.getClassLoader().getResource(HIVE_L4J);
265    if (hive_l4j == null) {
266      System.out.println(HIVE_L4J + " not found");
267    } else {
268      LogManager.resetConfiguration();
269      PropertyConfigurator.configure(hive_l4j);
270    }
271  }
272
273  /**
274   * This class provides helper routines to emit informational and error
275   * messages to the user and log4j files while obeying the current session's
276   * verbosity levels.
277   *
278   * NEVER write directly to the SessionStates standard output other than to
279   * emit result data DO use printInfo and printError provided by LogHelper to
280   * emit non result data strings.
281   *
282   * It is perfectly acceptable to have global static LogHelper objects (for
283   * example - once per module) LogHelper always emits info/error to current
284   * session as required.
285   */
286  public static class LogHelper {
287
288    protected Log LOG;
289    protected boolean isSilent;
290
291    public LogHelper(Log LOG) {
292      this(LOG, false);
293    }
294
295    public LogHelper(Log LOG, boolean isSilent) {
296      this.LOG = LOG;
297      this.isSilent = isSilent;
298    }
299
300    public PrintStream getOutStream() {
301      SessionState ss = SessionState.get();
302      return ((ss != null) && (ss.out != null)) ? ss.out : System.out;
303    }
304
305    public PrintStream getErrStream() {
306      SessionState ss = SessionState.get();
307      return ((ss != null) && (ss.err != null)) ? ss.err : System.err;
308    }
309
310    public PrintStream getChildOutStream() {
311      SessionState ss = SessionState.get();
312      return ((ss != null) && (ss.childOut != null)) ? ss.childOut : System.out;
313    }
314
315    public PrintStream getChildErrStream() {
316      SessionState ss = SessionState.get();
317      return ((ss != null) && (ss.childErr != null)) ? ss.childErr : System.err;
318    }
319
320    public boolean getIsSilent() {
321      SessionState ss = SessionState.get();
322      // use the session or the one supplied in constructor
323      return (ss != null) ? ss.getIsSilent() : isSilent;
324    }
325
326    public void printInfo(String info) {
327      printInfo(info, null);
328    }
329
330    public void printInfo(String info, String detail) {
331      if (!getIsSilent()) {
332        getErrStream().println(info);
333      }
334      LOG.info(info + StringUtils.defaultString(detail));
335    }
336
337    public void printError(String error) {
338      printError(error, null);
339    }
340
341    public void printError(String error, String detail) {
342      getErrStream().println(error);
343      LOG.error(error + StringUtils.defaultString(detail));
344    }
345  }
346
347  private static LogHelper _console;
348
349  /**
350   * initialize or retrieve console object for SessionState.
351   */
352  public static LogHelper getConsole() {
353    if (_console == null) {
354      Log LOG = LogFactory.getLog("SessionState");
355      _console = new LogHelper(LOG);
356    }
357    return _console;
358  }
359
360  public static String validateFile(Set<String> curFiles, String newFile) {
361    SessionState ss = SessionState.get();
362    LogHelper console = getConsole();
363    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
364
365    try {
366      if (Utilities.realFile(newFile, conf) != null) {
367        return newFile;
368      } else {
369        console.printError(newFile + " does not exist");
370        return null;
371      }
372    } catch (IOException e) {
373      console.printError("Unable to validate " + newFile + "\nException: "
374          + e.getMessage(), "\n"
375          + org.apache.hadoop.util.StringUtils.stringifyException(e));
376      return null;
377    }
378  }
379
380  public static boolean registerJar(String newJar) {
381    LogHelper console = getConsole();
382    try {
383      ClassLoader loader = Thread.currentThread().getContextClassLoader();
384      Thread.currentThread().setContextClassLoader(
385          Utilities.addToClassPath(loader, StringUtils.split(newJar, ",")));
386      console.printInfo("Added " + newJar + " to class path");
387      return true;
388    } catch (Exception e) {
389      console.printError("Unable to register " + newJar + "\nException: "
390          + e.getMessage(), "\n"
391          + org.apache.hadoop.util.StringUtils.stringifyException(e));
392      return false;
393    }
394  }
395
396  public static boolean unregisterJar(String jarsToUnregister) {
397    LogHelper console = getConsole();
398    try {
399      Utilities.removeFromClassPath(StringUtils.split(jarsToUnregister, ","));
400      console.printInfo("Deleted " + jarsToUnregister + " from class path");
401      return true;
402    } catch (Exception e) {
403      console.printError("Unable to unregister " + jarsToUnregister
404          + "\nException: " + e.getMessage(), "\n"
405          + org.apache.hadoop.util.StringUtils.stringifyException(e));
406      return false;
407    }
408  }
409
410  /**
411   * ResourceHook.
412   *
413   */
414  public static interface ResourceHook {
415    String preHook(Set<String> cur, String s);
416
417    boolean postHook(Set<String> cur, String s);
418  }
419
420  /**
421   * ResourceType.
422   *
423   */
424  public static enum ResourceType {
425    FILE(new ResourceHook() {
426      public String preHook(Set<String> cur, String s) {
427        return validateFile(cur, s);
428      }
429
430      public boolean postHook(Set<String> cur, String s) {
431        return true;
432      }
433    }),
434
435    JAR(new ResourceHook() {
436      public String preHook(Set<String> cur, String s) {
437        String newJar = validateFile(cur, s);
438        if (newJar != null) {
439          return (registerJar(newJar) ? newJar : null);
440        } else {
441          return null;
442        }
443      }
444
445      public boolean postHook(Set<String> cur, String s) {
446        return unregisterJar(s);
447      }
448    }),
449
450    ARCHIVE(new ResourceHook() {
451      public String preHook(Set<String> cur, String s) {
452        return validateFile(cur, s);
453      }
454
455      public boolean postHook(Set<String> cur, String s) {
456        return true;
457      }
458    });
459
460    public ResourceHook hook;
461
462    ResourceType(ResourceHook hook) {
463      this.hook = hook;
464    }
465  };
466
467  public static ResourceType find_resource_type(String s) {
468
469    s = s.trim().toUpperCase();
470
471    try {
472      return ResourceType.valueOf(s);
473    } catch (IllegalArgumentException e) {
474    }
475
476    // try singular
477    if (s.endsWith("S")) {
478      s = s.substring(0, s.length() - 1);
479    } else {
480      return null;
481    }
482
483    try {
484      return ResourceType.valueOf(s);
485    } catch (IllegalArgumentException e) {
486    }
487    return null;
488  }
489
490  private final HashMap<ResourceType, HashSet<String>> resource_map =
491    new HashMap<ResourceType, HashSet<String>>();
492
493  public void add_resource(ResourceType t, String value) {
494    // By default don't convert to unix
495    add_resource(t, value, false);
496  }
497
498  public String add_resource(ResourceType t, String value, boolean convertToUnix) {
499    try {
500      value = downloadResource(value, convertToUnix);
501    } catch (Exception e) {
502      getConsole().printError(e.getMessage());
503      return null;
504    }
505
506    if (resource_map.get(t) == null) {
507      resource_map.put(t, new HashSet<String>());
508    }
509
510    String fnlVal = value;
511    if (t.hook != null) {
512      fnlVal = t.hook.preHook(resource_map.get(t), value);
513      if (fnlVal == null) {
514        return fnlVal;
515      }
516    }
517    getConsole().printInfo("Added resource: " + fnlVal);
518    resource_map.get(t).add(fnlVal);
519
520    return fnlVal;
521  }
522
523  /**
524   * Returns the list of filesystem schemas as regex which
525   * are permissible for download as a resource.
526   */
527  public static String getMatchingSchemaAsRegex() {
528    String[] matchingSchema = {"s3", "s3n", "hdfs"};
529    return StringUtils.join(matchingSchema, "|");
530  }
531
532  private String downloadResource(String value, boolean convertToUnix) {
533    if (value.matches("("+ getMatchingSchemaAsRegex() +")://.*")) {
534      getConsole().printInfo("converting to local " + value);
535      File resourceDir = new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
536      String destinationName = new Path(value).getName();
537      File destinationFile = new File(resourceDir, destinationName);
538      if ( resourceDir.exists() && ! resourceDir.isDirectory() ) {
539        throw new RuntimeException("The resource directory is not a directory, resourceDir is set to" + resourceDir);
540      }
541      if ( ! resourceDir.exists() && ! resourceDir.mkdirs() ) {
542        throw new RuntimeException("Couldn't create directory " + resourceDir);
543      }
544      try {
545        FileSystem fs = FileSystem.get(new URI(value), conf);
546        fs.copyToLocalFile(new Path(value), new Path(destinationFile.getCanonicalPath()));
547        value = destinationFile.getCanonicalPath();
548        if (convertToUnix && DosToUnix.isWindowsScript(destinationFile)) {
549          try {
550            DosToUnix.convertWindowsScriptToUnix(destinationFile);
551          } catch (Exception e) {
552            throw new RuntimeException("Caught exception while converting to unix line endings", e);
553          }
554        }
555      } catch (Exception e) {
556        throw new RuntimeException("Failed to read external resource " + value, e);
557      }
558    }
559    return value;
560  }
561
562  public boolean delete_resource(ResourceType t, String value) {
563    if (resource_map.get(t) == null) {
564      return false;
565    }
566    if (t.hook != null) {
567      if (!t.hook.postHook(resource_map.get(t), value)) {
568        return false;
569      }
570    }
571    return (resource_map.get(t).remove(value));
572  }
573
574  public Set<String> list_resource(ResourceType t, List<String> filter) {
575    if (resource_map.get(t) == null) {
576      return null;
577    }
578    Set<String> orig = resource_map.get(t);
579    if (filter == null) {
580      return orig;
581    } else {
582      Set<String> fnl = new HashSet<String>();
583      for (String one : orig) {
584        if (filter.contains(one)) {
585          fnl.add(one);
586        }
587      }
588      return fnl;
589    }
590  }
591
592  public void delete_resource(ResourceType t) {
593    if (resource_map.get(t) != null) {
594      for (String value : resource_map.get(t)) {
595        delete_resource(t, value);
596      }
597      resource_map.remove(t);
598    }
599  }
600
601  public String getCommandType() {
602    if (commandType == null) {
603      return null;
604    }
605    return commandType.getOperationName();
606  }
607  
608  public HiveOperation getHiveOperation() {
609    return commandType;
610  }
611
612  public void setCommandType(HiveOperation commandType) {
613    this.commandType = commandType;
614  }
615  
616  public HiveAuthorizationProvider getAuthorizer() {
617    return authorizer;
618  }
619
620  public void setAuthorizer(HiveAuthorizationProvider authorizer) {
621    this.authorizer = authorizer;
622  }
623
624  public HiveAuthenticationProvider getAuthenticator() {
625    return authenticator;
626  }
627
628  public void setAuthenticator(HiveAuthenticationProvider authenticator) {
629    this.authenticator = authenticator;
630  }
631  
632  public CreateTableAutomaticGrant getCreateTableGrants() {
633    return createTableGrants;
634  }
635
636  public void setCreateTableGrants(CreateTableAutomaticGrant createTableGrants) {
637    this.createTableGrants = createTableGrants;
638  }
639}