/webportal/src/main/java/org/ala/spatial/analysis/web/AddToolMaxentComposer.java

http://alageospatialportal.googlecode.com/

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package org.ala.spatial.analysis.web;

import au.com.bytecode.opencsv.CSVReader;
import au.org.emii.portal.composer.MapComposer;
import au.org.emii.portal.menu.MapLayer;
import au.org.emii.portal.menu.MapLayerMetadata;
import au.org.emii.portal.util.LayerUtilities;
import java.io.StringReader;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.ala.spatial.data.Facet;
import org.ala.spatial.data.Query;
import org.ala.spatial.util.CommonData;
import org.ala.spatial.data.QueryField;
import org.ala.spatial.data.QueryUtil;
import org.ala.spatial.data.BiocacheQuery;
import org.ala.spatial.data.UploadQuery;
import org.ala.spatial.exception.NoSpeciesFoundException;
import org.ala.spatial.util.SelectedArea;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.zkoss.zk.ui.Executions;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zul.*;

/**
 *
 * @author ajay
 */
public class AddToolMaxentComposer extends AddToolComposer {

    int generation_count = 1;
    private Checkbox chkJackknife;
    private Checkbox chkRCurves;
    private Textbox txtTestPercentage;
    // private String taxon = "";
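
    /**
     * Configures this tool wizard: sets the method name ("Prediction") and the
     * number of steps, and loads the area, species and grid-layer selection
     * lists.
     */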
    @Override
    public void afterCompose() {
        super.afterCompose();

        this.selectedMethod = "Prediction";
        this.totalSteps = 5;

        this.setIncludeAnalysisLayersForAnyQuery(true);

        this.loadAreaLayers();
        this.loadSpeciesLayers();
        this.loadGridLayers(true, true, true);
        this.updateWindowTitle();
    }
    public void onClick$btnClearSelectionCtx(Event event) {
        // check if lbListLayers is empty as well,
        // if so, then disable the next button
        if (lbListLayers.getSelectedCount() == 0) {
            btnOk.setDisabled(true);
        }
    }

    @Override
    public void onLastPanel() {
        super.onLastPanel();
        //this.updateName("My Prediction model for " + rgSpecies.getSelectedItem().getLabel());
        this.updateName(getMapComposer().getNextAreaLayerName("My Prediction"));
    }
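
    /**
     * Runs when the wizard is finished: checks the estimate for users who are
     * not logged in, maps the selected species or species assemblage, and
     * submits the prediction via runmaxent().
     */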
    @Override
    public boolean onFinish() {
        //super.onFinish();
        if (!hasEstimated && !isUserLoggedIn()) {
            checkEstimate();
            return false;
        }

        Query query = getSelectedSpecies();
        if (query == null) {
            getMapComposer().showMessage("There is a problem selecting the species. Try to select the species again", this);
            return false;
        }

        if (searchSpeciesACComp.getAutoComplete().getSelectedItem() != null) {
            getMapComposer().mapSpeciesFromAutocompleteComponent(searchSpeciesACComp, getSelectedArea(), getGeospatialKosher());
        } else if (query != null && rgSpecies.getSelectedItem() != null && rgSpecies.getSelectedItem().getValue().equals("multiple")) {
            getMapComposer().mapSpecies(query, "Species assemblage", "species", 0, LayerUtilities.SPECIES, null, -1, MapComposer.DEFAULT_POINT_SIZE, MapComposer.DEFAULT_POINT_OPACITY, MapComposer.nextColour());
        }

        System.out.println("Maxent Selected layers:");
        System.out.println(getSelectedLayers());

        return runmaxent();
    }
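
    // Analysis inputs resolved once by setupData() and shared between
    // getEstimate() and runmaxent(): the selected area, the species query,
    // the environmental layer list and the occurrence CSV data.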
    SelectedArea sa = null;
    Query query = null;
    String sbenvsel = "";
    String[] speciesData = null;

    private void setupData() throws Exception {
        if (query == null) {
            sa = getSelectedArea();
            query = QueryUtil.queryFromSelectedArea(getSelectedSpecies(), sa, false, getGeospatialKosher());
            sbenvsel = getSelectedLayers();
            speciesData = getSpeciesData(query);
        }
    }
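
    /**
     * Requests an estimate for the Maxent job from the spatial analysis
     * service (POST to CommonData.satServer + "/ws/maxent/estimate") using the
     * current area, species and layer selections.
     *
     * @return the estimate reported by the service, or -1 if no records are
     *         available or the request fails.
     */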
    @Override
    public long getEstimate() {
        try {
            setupData();

            // sa = getSelectedArea();
            // query = QueryUtil.queryFromSelectedArea(getSelectedSpecies(), sa, false, getGeospatialKosher());
            //
            // sbenvsel = getSelectedLayers();

            StringBuffer sbProcessUrl = new StringBuffer();
            sbProcessUrl.append(CommonData.satServer + "/ws/maxent/estimate?");
            sbProcessUrl.append("taxonid=" + URLEncoder.encode(query.getName(), "UTF-8"));
            sbProcessUrl.append("&taxonlsid=" + URLEncoder.encode(query.getQ(), "UTF-8"));
            sbProcessUrl.append("&envlist=" + URLEncoder.encode(sbenvsel.toString(), "UTF-8"));
            if (chkJackknife.isChecked()) {
                sbProcessUrl.append("&chkJackknife=on");
            }
            if (chkRCurves.isChecked()) {
                sbProcessUrl.append("&chkResponseCurves=on");
            }
            sbProcessUrl.append("&txtTestPercentage=" + txtTestPercentage.getValue());

            // System.out.println("Calling Maxent: " + sbProcessUrl.toString() + "\narea: " + area);

            HttpClient client = new HttpClient();
            PostMethod get = new PostMethod(sbProcessUrl.toString());

            String area = null;
            if (sa.getMapLayer() != null && sa.getMapLayer().getData("envelope") != null) {
                area = "ENVELOPE(" + (String) sa.getMapLayer().getData("envelope") + ")";
            } else {
                area = sa.getWkt();
            }
            if (getSelectedArea() != null) {
                get.addParameter("area", area);
            }

            System.out.println("Getting species data");
            // speciesData = getSpeciesData(query);

            System.out.print("checking for species data...");
            //check for no data
            if (speciesData[0] == null || speciesData[0].trim().equals("")) {
                System.out.println("none available");
                if (speciesData[1] == null) {
                    getMapComposer().showMessage("No records available for Prediction", this);
                } else {
                    getMapComposer().showMessage("All species and records selected are marked as sensitive", this);
                }
                return -1;
            } else {
                System.out.println("available");
            }

            get.addParameter("species", speciesData[0]);
            if (speciesData[1] != null) {
                get.addParameter("removedspecies", speciesData[1]);
            }

            get.addRequestHeader("Accept", "text/plain");

            int result = client.executeMethod(get);
            String estimate = get.getResponseBodyAsString();

            return Long.valueOf(estimate);
        } catch (Exception e) {
            System.out.println("Unable to get estimates");
            e.printStackTrace(System.out);
        }
        return -1;
    }
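
    /**
     * Builds the Maxent request (taxon, environmental layers, Jackknife and
     * response-curve options, test percentage, area and occurrence CSV), POSTs
     * it to CommonData.satServer + "/ws/maxent", stores the process id
     * returned in the response body, logs the analysis and opens the progress
     * dialog.
     *
     * @return true if the job was submitted, false otherwise.
     */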
    public boolean runmaxent() {
        try {
            setupData();

            //SelectedArea sa = getSelectedArea();
            //Query query = QueryUtil.queryFromSelectedArea(getSelectedSpecies(), sa, false, getGeospatialKosher());
            //String sbenvsel = getSelectedLayers();
            //String area = getSelectedArea();
            //String taxonlsid = taxon;
            if (!searchSpeciesACComp.hasValidItemSelected()) {
                //MapLayer ml = getMapComposer().getMapLayerSpeciesLSID(taxon);
                //taxonlsid = ml.getMapLayerMetadata().getSpeciesDisplayLsid();
            }

            // if (isSensitiveSpecies(taxon)) {
            //     return;
            // }

            System.out.println("Selected species: " + query.getName());
            System.out.println("Selected species query: " + query.getQ());
            System.out.println("Selected env vars");
            System.out.println(sbenvsel.toString());
            System.out.println("Selected options: ");
            System.out.println("Jackknife: " + chkJackknife.isChecked());
            System.out.println("Response curves: " + chkRCurves.isChecked());
            System.out.println("Test per: " + txtTestPercentage.getValue());

            StringBuffer sbProcessUrl = new StringBuffer();
            sbProcessUrl.append(CommonData.satServer + "/ws/maxent?");
            sbProcessUrl.append("taxonid=" + URLEncoder.encode(query.getName(), "UTF-8"));
            sbProcessUrl.append("&taxonlsid=" + URLEncoder.encode(query.getQ(), "UTF-8"));
            sbProcessUrl.append("&envlist=" + URLEncoder.encode(sbenvsel.toString(), "UTF-8"));
            if (chkJackknife.isChecked()) {
                sbProcessUrl.append("&chkJackknife=on");
            }
            if (chkRCurves.isChecked()) {
                sbProcessUrl.append("&chkResponseCurves=on");
            }
            sbProcessUrl.append("&txtTestPercentage=" + txtTestPercentage.getValue());

            // System.out.println("Calling Maxent: " + sbProcessUrl.toString() + "\narea: " + area);

            HttpClient client = new HttpClient();
            PostMethod get = new PostMethod(sbProcessUrl.toString());

            String area = null;
            if (sa.getMapLayer() != null && sa.getMapLayer().getData("envelope") != null) {
                area = "ENVELOPE(" + (String) sa.getMapLayer().getData("envelope") + ")";
            } else {
                area = sa.getWkt();
            }
            if (getSelectedArea() != null) {
                get.addParameter("area", area);
            }

            System.out.println("Getting species data");
            //String[] speciesData = getSpeciesData(query);

            System.out.print("checking for species data...");
            //check for no data
            if (speciesData[0] == null || speciesData[0].trim().equals("")) {
                System.out.println("none available");
                if (speciesData[1] == null) {
                    getMapComposer().showMessage("No records available for Prediction", this);
                } else {
                    getMapComposer().showMessage("All species and records selected are marked as sensitive", this);
                }
                return false;
            } else {
                System.out.println("available");
            }

            // System.out.println("displaying species data: '");
            // System.out.println(speciesData[0]);
            // System.out.println("'");
            get.addParameter("species", speciesData[0]);
            if (speciesData[1] != null) {
                get.addParameter("removedspecies", speciesData[1]);
            }

            get.addRequestHeader("Accept", "text/plain");

            int result = client.executeMethod(get);
            pid = get.getResponseBodyAsString();

            // this.taxon = taxon;

            openProgressBar();

            try {
                StringBuffer sbParams = new StringBuffer();
                sbParams.append("Species: " + query.getName());
                sbParams.append(";Query: " + query.getFullQ(false));
                sbParams.append(";Jackknife: " + chkJackknife.isChecked());
                sbParams.append(";Response curves: " + chkRCurves.isChecked());
                sbParams.append(";Test per: " + txtTestPercentage.getValue());

                Map attrs = new HashMap();
                attrs.put("actionby", "user");
                attrs.put("actiontype", "analysis");
                //attrs.put("lsid", taxonlsid);
                attrs.put("useremail", "spatialuser");
                attrs.put("processid", pid);
                attrs.put("sessionid", "");
                attrs.put("layers", sbenvsel.toString());
                attrs.put("method", "maxent");
                attrs.put("params", sbParams.toString());
                attrs.put("downloadfile", "");
                getMapComposer().updateUserLog(attrs, "analysis result: " + CommonData.satServer + "/output/maxent/" + pid + "/species.html");

                String options = "";
                options += "Jackknife: " + chkJackknife.isChecked();
                options += ";Response curves: " + chkRCurves.isChecked();
                options += ";Test per: " + txtTestPercentage.getValue();
                if (query instanceof BiocacheQuery) {
                    BiocacheQuery bq = (BiocacheQuery) query;
                    options = bq.getWS() + "|" + bq.getBS() + "|" + bq.getFullQ(false) + "|" + options;
                    remoteLogger.logMapAnalysis(tToolName.getValue(), "Tool - Prediction", area, bq.getLsids(), sbenvsel.toString(), pid, options, "STARTED");
                } else {
                    remoteLogger.logMapAnalysis(tToolName.getValue(), "Tool - Prediction", area, query.getName() + "__" + query.getQ(), sbenvsel.toString(), pid, options, "STARTED");
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

            this.setVisible(false);

            return true;
        // } catch (NoSpeciesFoundException e) {
        //     System.out.println("Maxent error: NoSpeciesFoundException");
        //     e.printStackTrace(System.out);
        //     getMapComposer().showMessage("No species occurrences found in the current area. \nPlease select a larger area and re-run the analysis", this);
        } catch (Exception e) {
            System.out.println("Maxent error: ");
            e.printStackTrace(System.out);
            getMapComposer().showMessage("Unknown error.", this);
        }
        return false;
    }
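
    /**
     * Opens the modal analysis-progress dialog for the submitted process id.
     */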
    void openProgressBar() {
        ProgressWCController window = (ProgressWCController) Executions.createComponents("WEB-INF/zul/AnalysisProgress.zul", getMapComposer(), null);
        window.parent = this;
        window.start(pid, "Prediction", isBackgroundProcess);
        try {
            window.doModal();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
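
    /**
     * Maps the finished prediction as a WMS layer (ALA:species_&lt;pid&gt;),
     * attaches metadata pointing to the Maxent species.html output, starts the
     * download of the result zip, and intersects the new layer with any
     * user-uploaded coordinates so it can be used for faceting.
     */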
    public void loadMap(Event event) {
        String mapurl = CommonData.geoServer + "/wms?service=WMS&version=1.1.0&request=GetMap&layers=ALA:species_" + pid + "&styles=alastyles&FORMAT=image%2Fpng";

        String legendurl = CommonData.geoServer
                + "/wms?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&WIDTH=10&HEIGHT=1"
                + "&LAYER=ALA:species_" + pid
                + "&STYLE=alastyles";

        System.out.println(legendurl);

        //get job inputs
        String speciesName = "";
        try {
            for (String s : getJob("inputs").split(";")) {
                if (s.startsWith("scientificName")) {
                    speciesName = s.split(":")[1];
                    if (speciesName != null && speciesName.length() > 1) {
                        speciesName = speciesName.substring(0, 1).toUpperCase() + speciesName.substring(1);
                    }
                    break;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        String layername = tToolName.getValue();
        getMapComposer().addWMSLayer("species_" + pid, layername, mapurl, (float) 0.5, null, legendurl, LayerUtilities.MAXENT, null, null);

        MapLayer ml = getMapComposer().getMapLayer("species_" + pid);
        ml.setData("pid", pid);

        String infoUrl = CommonData.satServer + "/output/maxent/" + pid + "/species.html";
        MapLayerMetadata md = ml.getMapLayerMetadata();
        if (md == null) {
            md = new MapLayerMetadata();
            ml.setMapLayerMetadata(md);
        }
        md.setMoreInfo(infoUrl + "\nMaxent Output\npid:" + pid);
        md.setId(Long.valueOf(pid));

        try {
            // set off the download as well
            String fileUrl = CommonData.satServer + "/ws/download/" + pid;
            Filedownload.save(new URL(fileUrl).openStream(), "application/zip", tToolName.getValue().replaceAll(" ", "_") + ".zip"); // "ALA_Prediction_"+pid+".zip"
        } catch (Exception ex) {
            System.out.println("Error generating download for prediction model:");
            ex.printStackTrace(System.out);
        }

        this.detach();

        //getMapComposer().showMessage("Reference number to retrieve results: " + pid);
        //showInfoWindow("/output/maxent/" + pid + "/species.html");

        //perform intersection on user uploaded layers so you can facet on this layer
        getMapComposer().addAnalysisLayerToUploadedCoordinates("species_" + pid, tToolName.getValue());
    }
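
    /**
     * Fetches job information of the given type (e.g. "inputs") for the
     * current process id from CommonData.satServer + "/ws/jobs/", returning an
     * empty string on failure.
     */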
    String getJob(String type) {
        try {
            StringBuffer sbProcessUrl = new StringBuffer();
            sbProcessUrl.append(CommonData.satServer + "/ws/jobs/").append(type).append("?pid=").append(pid);
            System.out.println(sbProcessUrl.toString());

            HttpClient client = new HttpClient();
            GetMethod get = new GetMethod(sbProcessUrl.toString());
            get.addRequestHeader("Accept", "text/plain");

            int result = client.executeMethod(get);
            String slist = get.getResponseBodyAsString();
            System.out.println(slist);
            return slist;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "";
    }
    /**
     * Gets occurrence data for the query as CSV of
     * speciesName,longitude,latitude in [0], and the removed (sensitive)
     * records, if any, in [1].
     *
     * @param query the selected species query
     * @return String[] of {occurrence CSV, removed sensitive records or null}
     */
    private String[] getSpeciesData(Query query) throws NoSpeciesFoundException {
        if (query instanceof UploadQuery) {
            //no sensitive records in upload
            ArrayList<QueryField> fields = new ArrayList<QueryField>();
            String lsidFieldName = query.getSpeciesIdFieldName();
            QueryField qf = null;
            if (lsidFieldName != null) {
                qf = new QueryField(query.getSpeciesIdFieldName());
                qf.setStored(true);
                fields.add(qf);
            }
            double[] points = query.getPoints(fields);
            StringBuilder sb = null;
            if (points != null) {
                sb = new StringBuilder();
                for (int i = 0; i < points.length; i += 2) {
                    if (sb.length() == 0) {
                        //header
                        sb.append("species,longitude,latitude");
                    }
                    sb.append("\nspecies,").append(points[i]).append(",").append(points[i + 1]);
                }
            }
            String[] out = {((sb == null) ? null : sb.toString()), null};
            return out;
        } else {
            //identify sensitive species records
            List<String[]> sensitiveSpecies = null;
            try {
                String sensitiveSpeciesRaw = new BiocacheQuery(null, null, "sensitive:[* TO *]", null, false, null).speciesList();
                CSVReader csv = new CSVReader(new StringReader(sensitiveSpeciesRaw));
                sensitiveSpecies = csv.readAll();
                csv.close();
            } catch (Exception e) {
                e.printStackTrace();
            }

            HashSet<String> sensitiveSpeciesFound = new HashSet<String>();
            HashSet<String> sensitiveLsids = new HashSet<String>();

            //add to 'identified' sensitive list
            try {
                CSVReader csv = new CSVReader(new StringReader(query.speciesList()));
                List<String[]> fullSpeciesList = csv.readAll();
                csv.close();

                for (int i = 0; i < fullSpeciesList.size(); i++) {
                    String[] sa = fullSpeciesList.get(i);
                    for (String[] ss : sensitiveSpecies) {
                        if (sa != null && sa.length > 4
                                && ss != null && ss.length > 4
                                && sa[4].equals(ss[4])) {
                            sensitiveSpeciesFound.add(ss[4] + "," + ss[1] + "," + ss[3]);
                            sensitiveLsids.add(ss[4]);
                            break;
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

            //remove sensitive records that will not be LSID matched
            Query maxentQuery = query.newFacet(new Facet("sensitive", "[* TO *]", false), false);
            ArrayList<QueryField> fields = new ArrayList<QueryField>();
            String lsidFieldName = maxentQuery.getSpeciesIdFieldName();
            QueryField qf = null;
            if (lsidFieldName != null) {
                qf = new QueryField(maxentQuery.getSpeciesIdFieldName());
                qf.setStored(true);
                fields.add(qf);
            }
            double[] points = maxentQuery.getPoints(fields);
            StringBuilder sb = null;
            if (points != null) {
                sb = new StringBuilder();
                for (int i = 0; i < points.length; i += 2) {
                    boolean isSensitive = false;
                    if (qf != null) {
                        String lsid = qf.getAsString(i / 2);
                        isSensitive = sensitiveLsids.contains(lsid);
                    }
                    if (!isSensitive) {
                        if (sb.length() == 0) {
                            //header
                            sb.append("species,longitude,latitude");
                        }
                        sb.append("\nspecies,").append(points[i]).append(",").append(points[i + 1]);
                    }
                }
            }

            //collate sensitive species found, no header
            StringBuilder sen = new StringBuilder();
            for (String s : sensitiveSpeciesFound) {
                sen.append(s).append("\n");
            }

            String[] out = {((sb == null) ? null : sb.toString()), (sen.length() == 0) ? null : sen.toString()};
            return out;
        }
    }
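
    /**
     * Moves keyboard focus to the primary control of the current wizard step.
     */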
    @Override
    void fixFocus() {
        switch (currentStep) {
            case 1:
                rgArea.setFocus(true);
                break;
            case 2:
                if (rSpeciesSearch.isChecked()) {
                    searchSpeciesACComp.getAutoComplete().setFocus(true);
                } else {
                    rgSpecies.setFocus(true);
                }
                break;
            case 3:
                lbListLayers.setFocus(true);
                break;
            case 4:
                chkJackknife.setFocus(true);
                break;
            case 5:
                tToolName.setFocus(true);
                break;
        }
    }
}