
/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java

https://github.com/rbodkin/hadoop-common
Java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapreduce.v2.app;

import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.mapred.ShuffleHandler;
import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.Phase;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.Records;

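/**
 * Test helper that builds in-memory mock {@link Job}, {@link Task} and
 * {@link TaskAttempt} instances backed by randomly generated reports,
 * diagnostics and counters. Values are drawn from the cycling iterators
 * declared below, so repeated calls walk through every state, phase and
 * counter type.
 */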
public class MockJobs extends MockApps {

  static final Iterator<JobState> JOB_STATES = Iterators.cycle(
      JobState.values());
  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(
      TaskState.values());
  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators.cycle(
      TaskAttemptState.values());
  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(
      TaskType.values());
  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(
      JobCounter.values());
  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators.cycle(
      FileSystemCounter.values());
  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators.cycle(
      TaskCounter.values());
  static final Iterator<String> FS_SCHEMES = Iterators.cycle("FILE", "HDFS",
      "LAFS", "CEPH");
  static final Iterator<String> USER_COUNTER_GROUPS = Iterators.cycle(
      "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
      "PigCounters");
  static final Iterator<String> USER_COUNTERS = Iterators.cycle(
      "counter1", "counter2", "counter3");
  static final Iterator<Phase> PHASES = Iterators.cycle(Phase.values());
  static final Iterator<String> DIAGS = Iterators.cycle(
      "Error: java.lang.OutOfMemoryError: Java heap space",
      "Lost task tracker: tasktracker.domain/127.0.0.1:40879");

  static final int DT = 1000000; // ms

  public static String newJobName() {
    return newAppName();
  }

  public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
      int numTasksPerJob, int numAttemptsPerTask) {
    Map<JobId, Job> map = Maps.newHashMap();
    for (int j = 0; j < numJobsPerApp; ++j) {
      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
      map.put(job.getID(), job);
    }
    return map;
  }

  public static JobId newJobID(ApplicationId appID, int i) {
    JobId id = Records.newRecord(JobId.class);
    id.setAppId(appID);
    id.setId(i);
    return id;
  }

  public static JobReport newJobReport(JobId id) {
    JobReport report = Records.newRecord(JobReport.class);
    report.setJobId(id);
    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
    report.setMapProgress((float)Math.random());
    report.setReduceProgress((float)Math.random());
    report.setJobState(JOB_STATES.next());
    return report;
  }

  public static TaskReport newTaskReport(TaskId id) {
    TaskReport report = Records.newRecord(TaskReport.class);
    report.setTaskId(id);
    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
    report.setProgress((float)Math.random());
    report.setCounters(newCounters());
    report.setTaskState(TASK_STATES.next());
    return report;
  }

  public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
    TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
    report.setTaskAttemptId(id);
    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
    report.setPhase(PHASES.next());
    report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
    report.setProgress((float)Math.random());
    report.setCounters(newCounters());
    return report;
  }

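  // Populates a mapred Counters instance with random values for every
  // framework counter (JobCounter/TaskCounter), each FileSystemCounter under
  // each scheme in FS_SCHEMES, and a handful of user-defined counters, then
  // converts the result to the YARN Counters record via TypeConverter.toYarn().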
  @SuppressWarnings("deprecation")
  public static Counters newCounters() {
    org.apache.hadoop.mapred.Counters hc =
        new org.apache.hadoop.mapred.Counters();
    for (JobCounter c : JobCounter.values()) {
      hc.findCounter(c).setValue((long)(Math.random() * 1000));
    }
    for (TaskCounter c : TaskCounter.values()) {
      hc.findCounter(c).setValue((long)(Math.random() * 1000));
    }
    int nc = FileSystemCounter.values().length * 4;
    for (int i = 0; i < nc; ++i) {
      for (FileSystemCounter c : FileSystemCounter.values()) {
        hc.findCounter(FS_SCHEMES.next(), c).
            setValue((long)(Math.random() * DT));
      }
    }
    for (int i = 0; i < 2 * 3; ++i) {
      hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next()).
          setValue((long)(Math.random() * 100000));
    }
    return TypeConverter.toYarn(hc);
  }

  public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid,
      int m) {
    Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
    for (int i = 0; i < m; ++i) {
      TaskAttempt ta = newTaskAttempt(tid, i);
      map.put(ta.getID(), ta);
    }
    return map;
  }

  public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
    final TaskAttemptId taid = Records.newRecord(TaskAttemptId.class);
    taid.setTaskId(tid);
    taid.setId(i);
    final TaskAttemptReport report = newTaskAttemptReport(taid);
    final List<String> diags = Lists.newArrayList();
    diags.add(DIAGS.next());
    return new TaskAttempt() {
      @Override
      public TaskAttemptId getID() {
        return taid;
      }

      @Override
      public TaskAttemptReport getReport() {
        return report;
      }

      @Override
      public long getLaunchTime() {
        return 0;
      }

      @Override
      public long getFinishTime() {
        return 0;
      }

      @Override
      public int getShufflePort() {
        return ShuffleHandler.DEFAULT_SHUFFLE_PORT;
      }

      @Override
      public Counters getCounters() {
        return report.getCounters();
      }

      @Override
      public float getProgress() {
        return report.getProgress();
      }

      @Override
      public TaskAttemptState getState() {
        return report.getTaskAttemptState();
      }

      @Override
      public boolean isFinished() {
        switch (report.getTaskAttemptState()) {
          case SUCCEEDED:
          case FAILED:
          case KILLED: return true;
        }
        return false;
      }

      @Override
      public ContainerId getAssignedContainerID() {
        ContainerId id = Records.newRecord(ContainerId.class);
        id.setAppId(taid.getTaskId().getJobId().getAppId());
        return id;
      }

      @Override
      public String getNodeHttpAddress() {
        return "localhost:9999";
      }

      @Override
      public List<String> getDiagnostics() {
        return diags;
      }

      @Override
      public String getAssignedContainerMgrAddress() {
        return "localhost:9998";
      }
    };
  }

  public static Map<TaskId, Task> newTasks(JobId jid, int n, int m) {
    Map<TaskId, Task> map = Maps.newHashMap();
    for (int i = 0; i < n; ++i) {
      Task task = newTask(jid, i, m);
      map.put(task.getID(), task);
    }
    return map;
  }

  public static Task newTask(JobId jid, int i, int m) {
    final TaskId tid = Records.newRecord(TaskId.class);
    tid.setJobId(jid);
    tid.setId(i);
    tid.setTaskType(TASK_TYPES.next());
    final TaskReport report = newTaskReport(tid);
    final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);
    return new Task() {
      @Override
      public TaskId getID() {
        return tid;
      }

      @Override
      public TaskReport getReport() {
        return report;
      }

      @Override
      public Counters getCounters() {
        return report.getCounters();
      }

      @Override
      public float getProgress() {
        return report.getProgress();
      }

      @Override
      public TaskType getType() {
        return tid.getTaskType();
      }

      @Override
      public Map<TaskAttemptId, TaskAttempt> getAttempts() {
        return attempts;
      }

      @Override
      public TaskAttempt getAttempt(TaskAttemptId attemptID) {
        return attempts.get(attemptID);
      }

      @Override
      public boolean isFinished() {
        switch (report.getTaskState()) {
          case SUCCEEDED:
          case KILLED:
          case FAILED: return true;
        }
        return false;
      }

      @Override
      public boolean canCommit(TaskAttemptId taskAttemptID) {
        return false;
      }

      @Override
      public TaskState getState() {
        return report.getTaskState();
      }
    };
  }

  public static Counters getCounters(Collection<Task> tasks) {
    Counters counters = JobImpl.newCounters();
    return JobImpl.incrTaskCounters(counters, tasks);
  }

  static class TaskCount {
    int maps;
    int reduces;
    int completedMaps;
    int completedReduces;

    void incr(Task task) {
      TaskType type = task.getType();
      boolean finished = task.isFinished();
      if (type == TaskType.MAP) {
        if (finished) {
          ++completedMaps;
        }
        ++maps;
      } else if (type == TaskType.REDUCE) {
        if (finished) {
          ++completedReduces;
        }
        ++reduces;
      }
    }
  }

  static TaskCount getTaskCount(Collection<Task> tasks) {
    TaskCount tc = new TaskCount();
    for (Task task : tasks) {
      tc.incr(task);
    }
    return tc;
  }

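  // Builds a mock Job whose total and completed map/reduce counts are derived
  // from the generated tasks; operations the mocks do not model throw
  // UnsupportedOperationException.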
  public static Job newJob(ApplicationId appID, int i, int n, int m) {
    final JobId id = newJobID(appID, i);
    final String name = newJobName();
    final JobReport report = newJobReport(id);
    final Map<TaskId, Task> tasks = newTasks(id, n, m);
    final TaskCount taskCount = getTaskCount(tasks.values());
    final Counters counters = getCounters(tasks.values());
    return new Job() {
      @Override
      public JobId getID() {
        return id;
      }

      @Override
      public String getName() {
        return name;
      }

      @Override
      public JobState getState() {
        return report.getJobState();
      }

      @Override
      public JobReport getReport() {
        return report;
      }

      @Override
      public Counters getCounters() {
        return counters;
      }

      @Override
      public Map<TaskId, Task> getTasks() {
        return tasks;
      }

      @Override
      public Task getTask(TaskId taskID) {
        return tasks.get(taskID);
      }

      @Override
      public int getTotalMaps() {
        return taskCount.maps;
      }

      @Override
      public int getTotalReduces() {
        return taskCount.reduces;
      }

      @Override
      public int getCompletedMaps() {
        return taskCount.completedMaps;
      }

      @Override
      public int getCompletedReduces() {
        return taskCount.completedReduces;
      }

      @Override
      public boolean isUber() {
        return false;
      }

      @Override
      public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
          int fromEventId, int maxEvents) {
        return null;
      }

      @Override
      public Map<TaskId, Task> getTasks(TaskType taskType) {
        throw new UnsupportedOperationException("Not supported yet.");
      }

      @Override
      public List<String> getDiagnostics() {
        throw new UnsupportedOperationException("Not supported yet.");
      }

      @Override
      public boolean checkAccess(UserGroupInformation callerUGI,
          JobACL jobOperation) {
        return true;
      }

      @Override
      public String getUserName() {
        throw new UnsupportedOperationException("Not supported yet.");
      }
    };
  }
}
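
For context, the snippet below is a minimal usage sketch, not part of MockJobs.java: it shows how a test might drive newJobs(...) and inspect the generated mocks. It assumes JUnit 4 and the mutable YARN record API used on this branch (Records.newRecord plus setters on ApplicationId, the same pattern MockJobs uses for JobId and ContainerId); the class name TestMockJobsUsage and the chosen counts are illustrative only.

package org.apache.hadoop.mapreduce.v2.app;

import java.util.Map;

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.Records;
import org.junit.Assert;
import org.junit.Test;

public class TestMockJobsUsage {

  @Test
  public void buildsMockJobs() {
    // Assumed record construction style on this branch: mutable records
    // created via Records.newRecord, as MockJobs itself does.
    ApplicationId appId = Records.newRecord(ApplicationId.class);
    appId.setClusterTimestamp(System.currentTimeMillis());
    appId.setId(1);

    // 2 jobs per app, 3 tasks per job, 2 attempts per task (arbitrary counts).
    Map<JobId, Job> jobs = MockJobs.newJobs(appId, 2, 3, 2);
    Assert.assertEquals(2, jobs.size());

    for (Job job : jobs.values()) {
      Assert.assertNotNull(job.getReport());
      Assert.assertEquals(3, job.getTasks().size());
      // Every generated task is either a map or a reduce.
      Assert.assertEquals(3, job.getTotalMaps() + job.getTotalReduces());
    }
  }
}

Because every report field is randomized and the state iterators cycle, assertions in tests that build on these mocks should target structure (task counts, non-null reports) rather than specific values.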