// Source: Apache Hive HiveServer2, as vendored into Spark's hive-thriftserver
// (viewed via an LXR/OpenGrok cross-reference page; viewer navigation chrome removed).
0001 /**
0002  * Licensed to the Apache Software Foundation (ASF) under one
0003  * or more contributor license agreements.  See the NOTICE file
0004  * distributed with this work for additional information
0005  * regarding copyright ownership.  The ASF licenses this file
0006  * to you under the Apache License, Version 2.0 (the
0007  * "License"); you may not use this file except in compliance
0008  * with the License.  You may obtain a copy of the License at
0009  *
0010  *     http://www.apache.org/licenses/LICENSE-2.0
0011  *
0012  * Unless required by applicable law or agreed to in writing, software
0013  * distributed under the License is distributed on an "AS IS" BASIS,
0014  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0015  * See the License for the specific language governing permissions and
0016  * limitations under the License.
0017  */
0018 
0019 package org.apache.hive.service.server;
0020 
0021 import java.util.Properties;
0022 
0023 import scala.runtime.AbstractFunction0;
0024 import scala.runtime.BoxedUnit;
0025 
0026 import org.apache.commons.cli.GnuParser;
0027 import org.apache.commons.cli.HelpFormatter;
0028 import org.apache.commons.cli.Option;
0029 import org.apache.commons.cli.OptionBuilder;
0030 import org.apache.commons.cli.Options;
0031 import org.apache.commons.cli.ParseException;
0032 import org.apache.commons.logging.Log;
0033 import org.apache.commons.logging.LogFactory;
0034 import org.apache.hadoop.hive.conf.HiveConf;
0035 import org.apache.hadoop.hive.shims.ShimLoader;
0036 import org.apache.hive.common.util.HiveStringUtils;
0037 import org.apache.hive.service.CompositeService;
0038 import org.apache.hive.service.cli.CLIService;
0039 import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
0040 import org.apache.hive.service.cli.thrift.ThriftCLIService;
0041 import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
0042 
0043 import org.apache.spark.util.ShutdownHookManager;
0044 
0045 /**
0046  * HiveServer2.
0047  *
0048  */
0049 public class HiveServer2 extends CompositeService {
0050   private static final Log LOG = LogFactory.getLog(HiveServer2.class);
0051 
0052   private CLIService cliService;
0053   private ThriftCLIService thriftCLIService;
0054 
0055   public HiveServer2() {
0056     super(HiveServer2.class.getSimpleName());
0057     HiveConf.setLoadHiveServer2Config(true);
0058   }
0059 
0060   @Override
0061   public synchronized void init(HiveConf hiveConf) {
0062     cliService = new CLIService(this);
0063     addService(cliService);
0064     if (isHTTPTransportMode(hiveConf)) {
0065       thriftCLIService = new ThriftHttpCLIService(cliService);
0066     } else {
0067       thriftCLIService = new ThriftBinaryCLIService(cliService);
0068     }
0069     addService(thriftCLIService);
0070     super.init(hiveConf);
0071 
0072     // Add a shutdown hook for catching SIGTERM & SIGINT
0073     // this must be higher than the Hadoop Filesystem priority of 10,
0074     // which the default priority is.
0075     // The signature of the callback must match that of a scala () -> Unit
0076     // function
0077     ShutdownHookManager.addShutdownHook(
0078         new AbstractFunction0<BoxedUnit>() {
0079           public BoxedUnit apply() {
0080             try {
0081               LOG.info("Hive Server Shutdown hook invoked");
0082               stop();
0083             } catch (Throwable e) {
0084               LOG.warn("Ignoring Exception while stopping Hive Server from shutdown hook",
0085                   e);
0086             }
0087             return BoxedUnit.UNIT;
0088           }
0089         });
0090   }
0091 
0092   public static boolean isHTTPTransportMode(HiveConf hiveConf) {
0093     String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
0094     if (transportMode == null) {
0095       transportMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
0096     }
0097     if (transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
0098       return true;
0099     }
0100     return false;
0101   }
0102 
0103   @Override
0104   public synchronized void start() {
0105     super.start();
0106   }
0107 
0108   @Override
0109   public synchronized void stop() {
0110     LOG.info("Shutting down HiveServer2");
0111     super.stop();
0112   }
0113 
0114   private static void startHiveServer2() throws Throwable {
0115     long attempts = 0, maxAttempts = 1;
0116     while (true) {
0117       LOG.info("Starting HiveServer2");
0118       HiveConf hiveConf = new HiveConf();
0119       maxAttempts = hiveConf.getLongVar(HiveConf.ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS);
0120       HiveServer2 server = null;
0121       try {
0122         server = new HiveServer2();
0123         server.init(hiveConf);
0124         server.start();
0125         ShimLoader.getHadoopShims().startPauseMonitor(hiveConf);
0126         break;
0127       } catch (Throwable throwable) {
0128         if (server != null) {
0129           try {
0130             server.stop();
0131           } catch (Throwable t) {
0132             LOG.info("Exception caught when calling stop of HiveServer2 before retrying start", t);
0133           } finally {
0134             server = null;
0135           }
0136         }
0137         if (++attempts >= maxAttempts) {
0138           throw new Error("Max start attempts " + maxAttempts + " exhausted", throwable);
0139         } else {
0140           LOG.warn("Error starting HiveServer2 on attempt " + attempts
0141               + ", will retry in 60 seconds", throwable);
0142           try {
0143             Thread.sleep(60L * 1000L);
0144           } catch (InterruptedException e) {
0145             Thread.currentThread().interrupt();
0146           }
0147         }
0148       }
0149     }
0150   }
0151 
0152   public static void main(String[] args) {
0153     HiveConf.setLoadHiveServer2Config(true);
0154     ServerOptionsProcessor oproc = new ServerOptionsProcessor("hiveserver2");
0155     ServerOptionsProcessorResponse oprocResponse = oproc.parse(args);
0156 
0157     HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG);
0158 
0159     // Call the executor which will execute the appropriate command based on the parsed options
0160     oprocResponse.getServerOptionsExecutor().execute();
0161   }
0162 
0163   /**
0164    * ServerOptionsProcessor.
0165    * Process arguments given to HiveServer2 (-hiveconf property=value)
0166    * Set properties in System properties
0167    * Create an appropriate response object,
0168    * which has executor to execute the appropriate command based on the parsed options.
0169    */
0170   public static class ServerOptionsProcessor {
0171     private final Options options = new Options();
0172     private org.apache.commons.cli.CommandLine commandLine;
0173     private final String serverName;
0174     private final StringBuilder debugMessage = new StringBuilder();
0175 
0176     @SuppressWarnings("static-access")
0177     public ServerOptionsProcessor(String serverName) {
0178       this.serverName = serverName;
0179       // -hiveconf x=y
0180       options.addOption(OptionBuilder
0181           .withValueSeparator()
0182           .hasArgs(2)
0183           .withArgName("property=value")
0184           .withLongOpt("hiveconf")
0185           .withDescription("Use value for given property")
0186           .create());
0187       options.addOption(new Option("H", "help", false, "Print help information"));
0188     }
0189 
0190     public ServerOptionsProcessorResponse parse(String[] argv) {
0191       try {
0192         commandLine = new GnuParser().parse(options, argv);
0193         // Process --hiveconf
0194         // Get hiveconf param values and set the System property values
0195         Properties confProps = commandLine.getOptionProperties("hiveconf");
0196         for (String propKey : confProps.stringPropertyNames()) {
0197           // save logging message for log4j output latter after log4j initialize properly
0198           debugMessage.append("Setting " + propKey + "=" + confProps.getProperty(propKey) + ";\n");
0199           System.setProperty(propKey, confProps.getProperty(propKey));
0200         }
0201 
0202         // Process --help
0203         if (commandLine.hasOption('H')) {
0204           return new ServerOptionsProcessorResponse(new HelpOptionExecutor(serverName, options));
0205         }
0206       } catch (ParseException e) {
0207         // Error out & exit - we were not able to parse the args successfully
0208         System.err.println("Error starting HiveServer2 with given arguments: ");
0209         System.err.println(e.getMessage());
0210         System.exit(-1);
0211       }
0212       // Default executor, when no option is specified
0213       return new ServerOptionsProcessorResponse(new StartOptionExecutor());
0214     }
0215 
0216     StringBuilder getDebugMessage() {
0217       return debugMessage;
0218     }
0219   }
0220 
0221   /**
0222    * The response sent back from {@link ServerOptionsProcessor#parse(String[])}
0223    */
0224   static class ServerOptionsProcessorResponse {
0225     private final ServerOptionsExecutor serverOptionsExecutor;
0226 
0227     ServerOptionsProcessorResponse(ServerOptionsExecutor serverOptionsExecutor) {
0228       this.serverOptionsExecutor = serverOptionsExecutor;
0229     }
0230 
0231     ServerOptionsExecutor getServerOptionsExecutor() {
0232       return serverOptionsExecutor;
0233     }
0234   }
0235 
0236   /**
0237    * The executor interface for running the appropriate HiveServer2 command based on parsed options
0238    */
0239   interface ServerOptionsExecutor {
0240     void execute();
0241   }
0242 
0243   /**
0244    * HelpOptionExecutor: executes the --help option by printing out the usage
0245    */
0246   static class HelpOptionExecutor implements ServerOptionsExecutor {
0247     private final Options options;
0248     private final String serverName;
0249 
0250     HelpOptionExecutor(String serverName, Options options) {
0251       this.options = options;
0252       this.serverName = serverName;
0253     }
0254 
0255     @Override
0256     public void execute() {
0257       new HelpFormatter().printHelp(serverName, options);
0258       System.exit(0);
0259     }
0260   }
0261 
0262   /**
0263    * StartOptionExecutor: starts HiveServer2.
0264    * This is the default executor, when no option is specified.
0265    */
0266   static class StartOptionExecutor implements ServerOptionsExecutor {
0267     @Override
0268     public void execute() {
0269       try {
0270         startHiveServer2();
0271       } catch (Throwable t) {
0272         LOG.fatal("Error starting HiveServer2", t);
0273         System.exit(-1);
0274       }
0275     }
0276   }
0277 }