
/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java

https://github.com/RS1999ent/hadoop-common
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.test.system.process;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
/**
 * The concrete class which implements the start up and shut down
 * routines based on hadoop-daemon.sh. <br/>
 *
 * The class requires two keys to be present in the Configuration object
 * passed to it: see <code>CONF_HADOOPHOME</code> and
 * <code>CONF_HADOOPCONFDIR</code> for the names of these
 * configuration keys.
 *
 * The final command executed has the following format: <br/>
 * <code>
 * ssh host 'hadoop-home/bin/hadoop-daemon.sh --script scriptName
 * --config HADOOP_CONF_DIR (start|stop) command'
 * </code>
 */
public abstract class HadoopDaemonRemoteCluster
    implements ClusterProcessManager {

  private static final Log LOG = LogFactory
      .getLog(HadoopDaemonRemoteCluster.class.getName());

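  /**
   * Key used to configure the remote directory into which a new
   * configuration is pushed by {@link #pushConfig(String)}.
   */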
  public static final String CONF_HADOOPNEWCONFDIR =
      "test.system.hdrc.hadoopnewconfdir";

  /**
   * Key used to configure the HADOOP_PREFIX to be used by the
   * HadoopDaemonRemoteCluster.
   */
  public final static String CONF_HADOOPHOME =
      "test.system.hdrc.hadoophome";

  public final static String CONF_SCRIPTDIR =
      "test.system.hdrc.deployed.scripts.dir";

  /**
   * Key used to configure the HADOOP_CONF_DIR to be used by the
   * HadoopDaemonRemoteCluster.
   */
  public final static String CONF_HADOOPCONFDIR =
      "test.system.hdrc.hadoopconfdir";

  public final static String CONF_DEPLOYED_HADOOPCONFDIR =
      "test.system.hdrc.deployed.hadoopconfdir";

  private String hadoopHome;
  protected String hadoopConfDir;
  protected String scriptsDir;
  protected String hadoopNewConfDir;
  private final Set<Enum<?>> roles;
  private final List<HadoopDaemonInfo> daemonInfos;
  private List<RemoteProcess> processes;
  protected Configuration conf;

  public static class HadoopDaemonInfo {
    public final String cmd;
    public final Enum<?> role;
    public final List<String> hostNames;

    public HadoopDaemonInfo(String cmd, Enum<?> role, List<String> hostNames) {
      this.cmd = cmd;
      this.role = role;
      this.hostNames = hostNames;
    }

    public HadoopDaemonInfo(String cmd, Enum<?> role, String hostFile)
        throws IOException {
      this.cmd = cmd;
      this.role = role;
      File file = new File(getDeployedHadoopConfDir(), hostFile);
      BufferedReader reader = null;
      hostNames = new ArrayList<String>();
      try {
        reader = new BufferedReader(new FileReader(file));
        String host = null;
        while ((host = reader.readLine()) != null) {
          if (host.trim().isEmpty() || host.startsWith("#")) {
            // Skip empty lines and comment lines.
            continue;
          }
          hostNames.add(host.trim());
        }
        if (hostNames.size() < 1) {
          throw new IllegalArgumentException("At least one hostname "
              + "is required to be present in file - " + hostFile);
        }
      } finally {
        // Guard against reader being null if the FileReader constructor threw.
        if (reader != null) {
          try {
            reader.close();
          } catch (IOException e) {
            LOG.warn("Could not close reader for " + hostFile);
          }
        }
      }
      LOG.info("Created HadoopDaemonInfo for " + cmd + " " + role + " from "
          + hostFile);
    }
  }

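  /**
   * Pushes the configuration in <code>localDir</code> to every daemon host
   * and returns the remote directory the configuration was written to.
   */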
  @Override
  public String pushConfig(String localDir) throws IOException {
    for (RemoteProcess process : processes) {
      process.pushConfig(localDir);
    }
    return hadoopNewConfDir;
  }

  public HadoopDaemonRemoteCluster(List<HadoopDaemonInfo> daemonInfos) {
    this.daemonInfos = daemonInfos;
    this.roles = new HashSet<Enum<?>>();
    for (HadoopDaemonInfo info : daemonInfos) {
      this.roles.add(info.role);
    }
  }

  @Override
  public void init(Configuration conf) throws IOException {
    this.conf = conf;
    populateDirectories(conf);
    this.processes = new ArrayList<RemoteProcess>();
    populateDaemons();
  }

  @Override
  public List<RemoteProcess> getAllProcesses() {
    return processes;
  }

  @Override
  public Set<Enum<?>> getRoles() {
    return roles;
  }

  /**
   * Method to populate the hadoop home and hadoop configuration directories.
   *
   * @param conf
   *          Configuration object containing values for
   *          CONF_HADOOPHOME and CONF_HADOOPCONFDIR
   *
   * @throws IllegalArgumentException
   *           if the configuration or system property set does not contain
   *           values for the required keys.
   */
  protected void populateDirectories(Configuration conf) {
    hadoopHome = conf.get(CONF_HADOOPHOME);
    hadoopConfDir = conf.get(CONF_HADOOPCONFDIR);
    scriptsDir = conf.get(CONF_SCRIPTDIR);
    hadoopNewConfDir = conf.get(CONF_HADOOPNEWCONFDIR);
    if (hadoopHome == null || hadoopConfDir == null || hadoopHome.isEmpty()
        || hadoopConfDir.isEmpty()) {
      LOG.error("No configuration "
          + "for the HADOOP_PREFIX and HADOOP_CONF_DIR passed");
      throw new IllegalArgumentException(
          "No Configuration passed for hadoop home "
          + "and hadoop conf directories");
    }
  }

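  /**
   * Returns the deployed HADOOP_CONF_DIR. This is read from a system
   * property rather than the Configuration, so it is available to
   * HadoopDaemonInfo instances that are constructed before
   * {@link #init(Configuration)} is called.
   */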
  public static String getDeployedHadoopConfDir() {
    String dir = System.getProperty(CONF_DEPLOYED_HADOOPCONFDIR);
    if (dir == null || dir.isEmpty()) {
      LOG.error("No configuration "
          + "for the CONF_DEPLOYED_HADOOPCONFDIR passed");
      throw new IllegalArgumentException(
          "No Configuration passed for hadoop deployed conf directory");
    }
    return dir;
  }

  @Override
  public void start() throws IOException {
    for (RemoteProcess process : processes) {
      process.start();
    }
  }

  @Override
  public void start(String newConfLocation) throws IOException {
    for (RemoteProcess process : processes) {
      process.start(newConfLocation);
    }
  }

  @Override
  public void stop() throws IOException {
    for (RemoteProcess process : processes) {
      process.kill();
    }
  }

  @Override
  public void stop(String newConfLocation) throws IOException {
    for (RemoteProcess process : processes) {
      process.kill(newConfLocation);
    }
  }

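  /**
   * Resolves each configured host name to its canonical form and registers
   * one RemoteProcess per host for the given daemon.
   */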
  protected void populateDaemon(HadoopDaemonInfo info) throws IOException {
    for (String host : info.hostNames) {
      InetAddress addr = InetAddress.getByName(host);
      RemoteProcess process = getProcessManager(info,
          addr.getCanonicalHostName());
      processes.add(process);
    }
  }

  protected void populateDaemons() throws IOException {
    for (HadoopDaemonInfo info : daemonInfos) {
      populateDaemon(info);
    }
  }

  @Override
  public boolean isMultiUserSupported() throws IOException {
    return false;
  }

  protected RemoteProcess getProcessManager(
      HadoopDaemonInfo info, String hostName) {
    RemoteProcess process = new ScriptDaemon(info.cmd, hostName, info.role);
    return process;
  }

  /**
   * The core daemon class which actually implements the remote process
   * management of actual daemon processes in the cluster.
   */
  class ScriptDaemon implements RemoteProcess {

    private static final String STOP_COMMAND = "stop";
    private static final String START_COMMAND = "start";
    private static final String SCRIPT_NAME = "hadoop-daemon.sh";
    private static final String PUSH_CONFIG = "pushConfig.sh";
    protected final String daemonName;
    protected final String hostName;
    private final Enum<?> role;

    public ScriptDaemon(String daemonName, String hostName, Enum<?> role) {
      this.daemonName = daemonName;
      this.hostName = hostName;
      this.role = role;
    }

    @Override
    public String getHostName() {
      return hostName;
    }

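    /**
     * Builds the argument list for pushConfig.sh: the local configuration
     * directory, the target host, the remote directory to write to, and the
     * deployed hadoop configuration directory.
     */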
    private String[] getPushConfigCommand(String localDir, String remoteDir,
        File scriptDir) throws IOException {
      ArrayList<String> cmdArgs = new ArrayList<String>();
      cmdArgs.add(scriptDir.getAbsolutePath() + File.separator + PUSH_CONFIG);
      cmdArgs.add(localDir);
      cmdArgs.add(hostName);
      cmdArgs.add(remoteDir);
      cmdArgs.add(hadoopConfDir);
      return cmdArgs.toArray(new String[cmdArgs.size()]);
    }

    private ShellCommandExecutor buildPushConfig(String local, String remote)
        throws IOException {
      File scriptDir = new File(scriptsDir);
      String[] commandArgs = getPushConfigCommand(local, remote, scriptDir);
      HashMap<String, String> env = new HashMap<String, String>();
      ShellCommandExecutor executor = new ShellCommandExecutor(commandArgs,
          scriptDir, env);
      LOG.info(executor.toString());
      return executor;
    }

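    /**
     * Builds an ssh command that recreates the new configuration directory
     * on the remote host, removing any existing copy first.
     */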
    private ShellCommandExecutor createNewConfDir() throws IOException {
      ArrayList<String> cmdArgs = new ArrayList<String>();
      cmdArgs.add("ssh");
      cmdArgs.add(hostName);
      cmdArgs.add("if [ -d " + hadoopNewConfDir
          + " ];\n then echo Will remove existing directory; rm -rf "
          + hadoopNewConfDir + ";\nmkdir " + hadoopNewConfDir + "; else \n"
          + "echo " + hadoopNewConfDir + " does not exist hence creating"
          + "; mkdir " + hadoopNewConfDir + ";\n fi");
      String[] cmd = cmdArgs.toArray(new String[cmdArgs.size()]);
      ShellCommandExecutor executor = new ShellCommandExecutor(cmd);
      LOG.info(executor.toString());
      return executor;
    }

    @Override
    public void pushConfig(String localDir) throws IOException {
      createNewConfDir().execute();
      buildPushConfig(localDir, hadoopNewConfDir).execute();
    }

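    /**
     * Builds the executor that runs hadoop-daemon.sh on the remote host via
     * ssh; HADOOP_CONF_DIR is exported into the environment of the local
     * ssh invocation.
     */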
    private ShellCommandExecutor buildCommandExecutor(String command,
        String confDir) {
      String[] commandArgs = getCommand(command, confDir);
      File cwd = new File(".");
      HashMap<String, String> env = new HashMap<String, String>();
      env.put("HADOOP_CONF_DIR", confDir);
      ShellCommandExecutor executor
          = new ShellCommandExecutor(commandArgs, cwd, env);
      LOG.info(executor.toString());
      return executor;
    }

    private File getBinDir() {
      File binDir = new File(hadoopHome, "bin");
      return binDir;
    }

    protected String[] getCommand(String command, String confDir) {
      ArrayList<String> cmdArgs = new ArrayList<String>();
      File binDir = getBinDir();
      cmdArgs.add("ssh");
      cmdArgs.add(hostName);
      cmdArgs.add(binDir.getAbsolutePath() + File.separator + SCRIPT_NAME);
      cmdArgs.add("--config");
      cmdArgs.add(confDir);
      // XXX Twenty internal version does not support --script option.
      cmdArgs.add(command);
      cmdArgs.add(daemonName);
      return cmdArgs.toArray(new String[cmdArgs.size()]);
    }

    @Override
    public void kill() throws IOException {
      kill(hadoopConfDir);
    }

    @Override
    public void start() throws IOException {
      start(hadoopConfDir);
    }

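    /**
     * Starts the daemon using the given configuration location. The script
     * output is scanned for the word "error"; a match is treated as a
     * failed start.
     */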
    public void start(String newConfLocation) throws IOException {
      ShellCommandExecutor cme = buildCommandExecutor(START_COMMAND,
          newConfLocation);
      cme.execute();
      String output = cme.getOutput();
      if (!output.isEmpty()) { // getOutput() never returns a null value
        if (output.toLowerCase().contains("error")) {
          LOG.warn("Error is detected.");
          throw new IOException("Start error\n" + output);
        }
      }
    }

    public void kill(String newConfLocation) throws IOException {
      ShellCommandExecutor cme
          = buildCommandExecutor(STOP_COMMAND, newConfLocation);
      cme.execute();
      String output = cme.getOutput();
      if (!output.isEmpty()) { // getOutput() never returns a null value
        if (output.toLowerCase().contains("error")) {
          LOG.warn("Error is detected.");
          throw new IOException("Kill error\n" + output);
        }
      }
    }

    @Override
    public Enum<?> getRole() {
      return role;
    }
  }
}