/webportal/src/main/java/org/ala/spatial/analysis/web/AddToolMaxentComposer.java
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package org.ala.spatial.analysis.web;

import au.com.bytecode.opencsv.CSVReader;
import au.org.emii.portal.composer.MapComposer;
import au.org.emii.portal.menu.MapLayer;
import au.org.emii.portal.menu.MapLayerMetadata;
import au.org.emii.portal.util.LayerUtilities;
import java.io.StringReader;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.ala.spatial.data.Facet;
import org.ala.spatial.data.Query;
import org.ala.spatial.util.CommonData;
import org.ala.spatial.data.QueryField;
import org.ala.spatial.data.QueryUtil;
import org.ala.spatial.data.BiocacheQuery;
import org.ala.spatial.data.UploadQuery;
import org.ala.spatial.exception.NoSpeciesFoundException;
import org.ala.spatial.util.SelectedArea;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.zkoss.zk.ui.Executions;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zul.*;

/**
 *
 * @author ajay
 */
public class AddToolMaxentComposer extends AddToolComposer {

    int generation_count = 1;
    private Checkbox chkJackknife;
    private Checkbox chkRCurves;
    private Textbox txtTestPercentage;
//    private String taxon = "";

    @Override
    public void afterCompose() {
        super.afterCompose();

        this.selectedMethod = "Prediction";
        this.totalSteps = 5;

        this.setIncludeAnalysisLayersForAnyQuery(true);

        this.loadAreaLayers();
        this.loadSpeciesLayers();
        this.loadGridLayers(true, true, true);
        this.updateWindowTitle();
    }

    public void onClick$btnClearSelectionCtx(Event event) {
        // check if lbListLayers is empty as well,
        // if so, then disable the next button
        if (lbListLayers.getSelectedCount() == 0) {
            btnOk.setDisabled(true);
        }
    }

    @Override
    public void onLastPanel() {
        super.onLastPanel();
        //this.updateName("My Prediction model for " + rgSpecies.getSelectedItem().getLabel());
        this.updateName(getMapComposer().getNextAreaLayerName("My Prediction"));
    }

    @Override
    public boolean onFinish() {
        //super.onFinish();

        if (!hasEstimated && !isUserLoggedIn()) {
            checkEstimate();
            return false;
        }

        Query query = getSelectedSpecies();
        if (query == null) {
            getMapComposer().showMessage("There is a problem selecting the species. Try to select the species again", this);
            return false;
        }
        if (searchSpeciesACComp.getAutoComplete().getSelectedItem() != null) {
            getMapComposer().mapSpeciesFromAutocompleteComponent(searchSpeciesACComp, getSelectedArea(), getGeospatialKosher());
        } else if (query != null && rgSpecies.getSelectedItem() != null && rgSpecies.getSelectedItem().getValue().equals("multiple")) {
            getMapComposer().mapSpecies(query, "Species assemblage", "species", 0, LayerUtilities.SPECIES, null, -1, MapComposer.DEFAULT_POINT_SIZE, MapComposer.DEFAULT_POINT_OPACITY, MapComposer.nextColour());
        }

        System.out.println("Maxent Selected layers:");
        System.out.println(getSelectedLayers());

        return runmaxent();
    }

    SelectedArea sa = null;
    Query query = null;
    String sbenvsel = "";
    String[] speciesData = null;

    private void setupData() throws Exception {
        if (query == null) {
            sa = getSelectedArea();
            query = QueryUtil.queryFromSelectedArea(getSelectedSpecies(), sa, false, getGeospatialKosher());

            sbenvsel = getSelectedLayers();
            speciesData = getSpeciesData(query);
        }
    }

    @Override
    public long getEstimate() {
        try {
            setupData();

//            sa = getSelectedArea();
//            query = QueryUtil.queryFromSelectedArea(getSelectedSpecies(), sa, false, getGeospatialKosher());
//
//            sbenvsel = getSelectedLayers();

            StringBuffer sbProcessUrl = new StringBuffer();
            sbProcessUrl.append(CommonData.satServer + "/ws/maxent/estimate?");
            sbProcessUrl.append("taxonid=" + URLEncoder.encode(query.getName(), "UTF-8"));
            sbProcessUrl.append("&taxonlsid=" + URLEncoder.encode(query.getQ(), "UTF-8"));
            sbProcessUrl.append("&envlist=" + URLEncoder.encode(sbenvsel.toString(), "UTF-8"));
            if (chkJackknife.isChecked()) {
                sbProcessUrl.append("&chkJackknife=on");
            }
            if (chkRCurves.isChecked()) {
                sbProcessUrl.append("&chkResponseCurves=on");
            }
            sbProcessUrl.append("&txtTestPercentage=" + txtTestPercentage.getValue());

            // System.out.println("Calling Maxent: " + sbProcessUrl.toString() + "\narea: " + area);

            HttpClient client = new HttpClient();
            PostMethod get = new PostMethod(sbProcessUrl.toString());

            String area = null;
            if (sa.getMapLayer() != null && sa.getMapLayer().getData("envelope") != null) {
                area = "ENVELOPE(" + (String) sa.getMapLayer().getData("envelope") + ")";
            } else {
                area = sa.getWkt();
            }
            if (getSelectedArea() != null) {
                get.addParameter("area", area);
            }

            System.out.println("Getting species data");
//            speciesData = getSpeciesData(query);
            System.out.print("checking for species data...");
            //check for no data
            if (speciesData[0] == null || speciesData[0].trim().equals("")) {
                System.out.println("none available");
                if (speciesData[1] == null) {
                    getMapComposer().showMessage("No records available for Prediction", this);
                } else {
                    getMapComposer().showMessage("All species and records selected are marked as sensitive", this);
                }
                return -1;
            } else {
                System.out.println("available");
            }

            get.addParameter("species", speciesData[0]);
            if (speciesData[1] != null) {
                get.addParameter("removedspecies", speciesData[1]);
            }

            get.addRequestHeader("Accept", "text/plain");

            int result = client.executeMethod(get);
            String estimate = get.getResponseBodyAsString();

            return Long.valueOf(estimate);

        } catch (Exception e) {
            System.out.println("Unable to get estimates");
            e.printStackTrace(System.out);
        }

        return -1;
    }

    public boolean runmaxent() {
        try {
            setupData();

            //SelectedArea sa = getSelectedArea();
            //Query query = QueryUtil.queryFromSelectedArea(getSelectedSpecies(), sa, false, getGeospatialKosher());

            //String sbenvsel = getSelectedLayers();

            //String area = getSelectedArea();
            //String taxonlsid = taxon;
            if (!searchSpeciesACComp.hasValidItemSelected()) {
                //MapLayer ml = getMapComposer().getMapLayerSpeciesLSID(taxon);
                // taxonlsid = ml.getMapLayerMetadata().getSpeciesDisplayLsid();
            }

//            if (isSensitiveSpecies(taxon)) {
//                return;
//            }

            System.out.println("Selected species: " + query.getName());
            System.out.println("Selected species query: " + query.getQ());
            System.out.println("Selected env vars");
            System.out.println(sbenvsel.toString());
            System.out.println("Selected options: ");
            System.out.println("Jackknife: " + chkJackknife.isChecked());
            System.out.println("Response curves: " + chkRCurves.isChecked());
            System.out.println("Test per: " + txtTestPercentage.getValue());

            StringBuffer sbProcessUrl = new StringBuffer();
            sbProcessUrl.append(CommonData.satServer + "/ws/maxent?");
            sbProcessUrl.append("taxonid=" + URLEncoder.encode(query.getName(), "UTF-8"));
            sbProcessUrl.append("&taxonlsid=" + URLEncoder.encode(query.getQ(), "UTF-8"));
            sbProcessUrl.append("&envlist=" + URLEncoder.encode(sbenvsel.toString(), "UTF-8"));
            if (chkJackknife.isChecked()) {
                sbProcessUrl.append("&chkJackknife=on");
            }
            if (chkRCurves.isChecked()) {
                sbProcessUrl.append("&chkResponseCurves=on");
            }
            sbProcessUrl.append("&txtTestPercentage=" + txtTestPercentage.getValue());

            // System.out.println("Calling Maxent: " + sbProcessUrl.toString() + "\narea: " + area);

            HttpClient client = new HttpClient();
            PostMethod get = new PostMethod(sbProcessUrl.toString());

            String area = null;
            if (sa.getMapLayer() != null && sa.getMapLayer().getData("envelope") != null) {
                area = "ENVELOPE(" + (String) sa.getMapLayer().getData("envelope") + ")";
            } else {
                area = sa.getWkt();
            }
            if (getSelectedArea() != null) {
                get.addParameter("area", area);
            }

            System.out.println("Getting species data");
            //String[] speciesData = getSpeciesData(query);
            System.out.print("checking for species data...");
            //check for no data
            if (speciesData[0] == null || speciesData[0].trim().equals("")) {
                System.out.println("none available");
                if (speciesData[1] == null) {
                    getMapComposer().showMessage("No records available for Prediction", this);
                } else {
                    getMapComposer().showMessage("All species and records selected are marked as sensitive", this);
                }
                return false;
            } else {
                System.out.println("available");
            }
//            System.out.println("displaying species data: '");
//            System.out.println(speciesData[0]);
//            System.out.println("'");

            get.addParameter("species", speciesData[0]);
            if (speciesData[1] != null) {
                get.addParameter("removedspecies", speciesData[1]);
            }

            get.addRequestHeader("Accept", "text/plain");

            int result = client.executeMethod(get);
            pid = get.getResponseBodyAsString();
//            this.taxon = taxon;

            openProgressBar();

            try {
                StringBuffer sbParams = new StringBuffer();
                sbParams.append("Species: " + query.getName());
                sbParams.append(";Query: " + query.getFullQ(false));
                sbParams.append(";Jackknife: " + chkJackknife.isChecked());
                sbParams.append(";Response curves: " + chkRCurves.isChecked());
                sbParams.append(";Test per: " + txtTestPercentage.getValue());

                Map attrs = new HashMap();
                attrs.put("actionby", "user");
                attrs.put("actiontype", "analysis");
                //attrs.put("lsid", taxonlsid);
                attrs.put("useremail", "spatialuser");
                attrs.put("processid", pid);
                attrs.put("sessionid", "");
                attrs.put("layers", sbenvsel.toString());
                attrs.put("method", "maxent");
                attrs.put("params", sbParams.toString());
                attrs.put("downloadfile", "");
                getMapComposer().updateUserLog(attrs, "analysis result: " + CommonData.satServer + "/output/maxent/" + pid + "/species.html");
                String options = "";
                options += "Jackknife: " + chkJackknife.isChecked();
                options += ";Response curves: " + chkRCurves.isChecked();
                options += ";Test per: " + txtTestPercentage.getValue();
                if (query instanceof BiocacheQuery) {
                    BiocacheQuery bq = (BiocacheQuery) query;
                    options = bq.getWS() + "|" + bq.getBS() + "|" + bq.getFullQ(false) + "|" + options;
                    remoteLogger.logMapAnalysis(tToolName.getValue(), "Tool - Prediction", area, bq.getLsids(), sbenvsel.toString(), pid, options, "STARTED");
                } else {
                    remoteLogger.logMapAnalysis(tToolName.getValue(), "Tool - Prediction", area, query.getName() + "__" + query.getQ(), sbenvsel.toString(), pid, options, "STARTED");
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

            this.setVisible(false);

            return true;
//        } catch (NoSpeciesFoundException e) {
//            System.out.println("Maxent error: NoSpeciesFoundException");
//            e.printStackTrace(System.out);
//            getMapComposer().showMessage("No species occurrences found in the current area. Please select a larger area and re-run the analysis", this);
        } catch (Exception e) {
            System.out.println("Maxent error: ");
            e.printStackTrace(System.out);
            getMapComposer().showMessage("Unknown error.", this);
        }
        return false;
    }

    void openProgressBar() {
        ProgressWCController window = (ProgressWCController) Executions.createComponents("WEB-INF/zul/AnalysisProgress.zul", getMapComposer(), null);
        window.parent = this;
        window.start(pid, "Prediction", isBackgroundProcess);
        try {
            window.doModal();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void loadMap(Event event) {

        String mapurl = CommonData.geoServer + "/wms?service=WMS&version=1.1.0&request=GetMap&layers=ALA:species_" + pid + "&styles=alastyles&FORMAT=image%2Fpng";

        String legendurl = CommonData.geoServer
                + "/wms?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&WIDTH=10&HEIGHT=1"
                + "&LAYER=ALA:species_" + pid
                + "&STYLE=alastyles";

        System.out.println(legendurl);

        //get job inputs
        String speciesName = "";
        try {
            for (String s : getJob("inputs").split(";")) {
                if (s.startsWith("scientificName")) {
                    speciesName = s.split(":")[1];
                    if (speciesName != null && speciesName.length() > 1) {
                        speciesName = speciesName.substring(0, 1).toUpperCase() + speciesName.substring(1);
                    }
                    break;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        String layername = tToolName.getValue();
        getMapComposer().addWMSLayer("species_" + pid, layername, mapurl, (float) 0.5, null, legendurl, LayerUtilities.MAXENT, null, null);
        MapLayer ml = getMapComposer().getMapLayer("species_" + pid);
        ml.setData("pid", pid);
        String infoUrl = CommonData.satServer + "/output/maxent/" + pid + "/species.html";
        MapLayerMetadata md = ml.getMapLayerMetadata();
        if (md == null) {
            md = new MapLayerMetadata();
            ml.setMapLayerMetadata(md);
        }
        md.setMoreInfo(infoUrl + "\nMaxent Output\npid:" + pid);
        md.setId(Long.valueOf(pid));

        try {
            // set off the download as well
            String fileUrl = CommonData.satServer + "/ws/download/" + pid;
            Filedownload.save(new URL(fileUrl).openStream(), "application/zip", tToolName.getValue().replaceAll(" ", "_") + ".zip"); // "ALA_Prediction_"+pid+".zip"
        } catch (Exception ex) {
            System.out.println("Error generating download for prediction model:");
            ex.printStackTrace(System.out);
        }

        this.detach();

        //getMapComposer().showMessage("Reference number to retrieve results: " + pid);

        //showInfoWindow("/output/maxent/" + pid + "/species.html");

        //perform intersection on user uploaded layers so you can facet on this layer
        getMapComposer().addAnalysisLayerToUploadedCoordinates("species_" + pid, tToolName.getValue());
    }

    String getJob(String type) {
        try {
            StringBuffer sbProcessUrl = new StringBuffer();
            sbProcessUrl.append(CommonData.satServer + "/ws/jobs/").append(type).append("?pid=").append(pid);

            System.out.println(sbProcessUrl.toString());
            HttpClient client = new HttpClient();
            GetMethod get = new GetMethod(sbProcessUrl.toString());

            get.addRequestHeader("Accept", "text/plain");

            int result = client.executeMethod(get);
            String slist = get.getResponseBodyAsString();
            System.out.println(slist);
            return slist;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "";
    }
    /**
     * get CSV of speciesName, longitude, latitude in [0] and
     * removed sensitive species in [1]
     *
     * @param query
     * @return
     */
    private String[] getSpeciesData(Query query) throws NoSpeciesFoundException {
        if (query instanceof UploadQuery) {
            //no sensitive records in upload
            ArrayList<QueryField> fields = new ArrayList<QueryField>();
            String lsidFieldName = query.getSpeciesIdFieldName();
            QueryField qf = null;
            if (lsidFieldName != null) {
                qf = new QueryField(query.getSpeciesIdFieldName());
                qf.setStored(true);
                fields.add(qf);
            }
            double[] points = query.getPoints(fields);
            StringBuilder sb = null;
            if (points != null) {
                sb = new StringBuilder();
                for (int i = 0; i < points.length; i += 2) {
                    if (sb.length() == 0) {
                        //header
                        sb.append("species,longitude,latitude");
                    }
                    sb.append("\nspecies,").append(points[i]).append(",").append(points[i + 1]);
                }
            }

            String[] out = {((sb == null) ? null : sb.toString()), null};
            return out;
        } else {
            //identify sensitive species records
            List<String[]> sensitiveSpecies = null;
            try {
                String sensitiveSpeciesRaw = new BiocacheQuery(null, null, "sensitive:[* TO *]", null, false, null).speciesList();
                CSVReader csv = new CSVReader(new StringReader(sensitiveSpeciesRaw));
                sensitiveSpecies = csv.readAll();
                csv.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
            HashSet<String> sensitiveSpeciesFound = new HashSet<String>();
            HashSet<String> sensitiveLsids = new HashSet<String>();

            //add to 'identified' sensitive list
            try {
                CSVReader csv = new CSVReader(new StringReader(query.speciesList()));
                List<String[]> fullSpeciesList = csv.readAll();
                csv.close();
                for (int i = 0; i < fullSpeciesList.size(); i++) {
                    String[] sa = fullSpeciesList.get(i);
                    for (String[] ss : sensitiveSpecies) {
                        if (sa != null && sa.length > 4
                                && ss != null && ss.length > 4
                                && sa[4].equals(ss[4])) {
                            sensitiveSpeciesFound.add(ss[4] + "," + ss[1] + "," + ss[3]);
                            sensitiveLsids.add(ss[4]);
                            break;
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

            //remove sensitive records that will not be LSID matched
            Query maxentQuery = query.newFacet(new Facet("sensitive", "[* TO *]", false), false);
            ArrayList<QueryField> fields = new ArrayList<QueryField>();
            String lsidFieldName = maxentQuery.getSpeciesIdFieldName();
            QueryField qf = null;
            if (lsidFieldName != null) {
                qf = new QueryField(maxentQuery.getSpeciesIdFieldName());
                qf.setStored(true);
                fields.add(qf);
            }
            double[] points = maxentQuery.getPoints(fields);
            StringBuilder sb = null;
            if (points != null) {
                sb = new StringBuilder();
                for (int i = 0; i < points.length; i += 2) {
                    boolean isSensitive = false;
                    if (qf != null) {
                        String lsid = qf.getAsString(i / 2);
                        isSensitive = sensitiveLsids.contains(lsid);
                    }
                    if (!isSensitive) {
                        if (sb.length() == 0) {
                            //header
                            sb.append("species,longitude,latitude");
                        }
                        sb.append("\nspecies,").append(points[i]).append(",").append(points[i + 1]);
                    }
                }
            }

            //collate sensitive species found, no header
            StringBuilder sen = new StringBuilder();
            for (String s : sensitiveSpeciesFound) {
                sen.append(s).append("\n");
            }

            String[] out = {((sb == null) ? null : sb.toString()), (sen.length() == 0) ? null : sen.toString()};
            return out;
        }
    }

    @Override
    void fixFocus() {
        switch (currentStep) {
            case 1:
                rgArea.setFocus(true);
                break;
            case 2:
                if (rSpeciesSearch.isChecked()) {
                    searchSpeciesACComp.getAutoComplete().setFocus(true);
                } else {
                    rgSpecies.setFocus(true);
                }
                break;
            case 3:
                lbListLayers.setFocus(true);
                break;
            case 4:
                chkJackknife.setFocus(true);
                break;
            case 5:
                tToolName.setFocus(true);
                break;
        }
    }
}