
/geonode/layers/tests.py

https://github.com/GeoNode/geonode
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import io
import os
import shutil
import gisdata
import logging
import zipfile
from uuid import uuid4
from unittest.mock import MagicMock, patch
from collections import namedtuple
from pinax.ratings.models import OverallRating
from django.urls import reverse
from django.test import TestCase
from django.forms import ValidationError
from django.test.client import RequestFactory
from django.contrib.contenttypes.models import ContentType
from django.core.files.uploadedfile import SimpleUploadedFile
from django.contrib.auth.models import Group
from django.contrib.gis.geos import Polygon
from django.db.models import Count
from django.contrib.auth import get_user_model
from django.conf import settings
from django.test.utils import override_settings
from django.contrib.admin.sites import AdminSite
from geonode.geoserver.createlayer.utils import create_dataset
from geonode.layers import utils
from geonode.base import enumerations
from geonode.layers import DatasetAppConfig
from geonode.layers.admin import DatasetAdmin
from geonode.decorators import on_ogc_backend
from geonode.maps.models import Map, MapLayer
from geonode.utils import DisableDjangoSignals, mkdtemp
from geonode.layers.views import _resolve_dataset
from geonode import GeoNodeException, geoserver
from geonode.people.utils import get_valid_user
from guardian.shortcuts import get_anonymous_user
from geonode.tests.base import GeoNodeBaseTestSupport
from geonode.resource.manager import resource_manager
from geonode.tests.utils import NotificationsTestsHelper
from geonode.layers.models import Dataset, Style, Attribute
from geonode.layers.forms import DatasetForm, JSONField, LayerUploadForm
from geonode.layers.populate_datasets_data import create_dataset_data
from geonode.base.models import TopicCategory, License, Region, Link
from geonode.utils import check_ogc_backend, set_resource_default_links
from geonode.layers.metadata import convert_keyword, set_metadata, parse_metadata
from geonode.layers.utils import (
    is_sld_upload_only,
    is_xml_upload_only,
    dataset_type,
    get_files,
    get_valid_name,
    get_valid_dataset_name,
    surrogate_escape_string, validate_input_source)
from geonode.base.populate_test_data import (
    all_public,
    create_models,
    remove_models,
    create_single_dataset)

logger = logging.getLogger(__name__)

class DatasetsTest(GeoNodeBaseTestSupport):
    """Tests geonode.layers app/module
    """

    type = 'dataset'

    fixtures = [
        'initial_data.json',
        'group_test_data.json',
        'default_oauth_apps.json'
    ]

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        create_models(type=cls.get_type, integration=cls.get_integration)
        all_public()

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        remove_models(cls.get_obj_ids, type=cls.get_type, integration=cls.get_integration)

    def setUp(self):
        super().setUp()
        self.user = 'admin'
        self.passwd = 'admin'
        self.anonymous_user = get_anonymous_user()
        self.exml_path = f"{settings.PROJECT_ROOT}/base/fixtures/test_xml.xml"
        self.sld_path = f"{settings.PROJECT_ROOT}/base/fixtures/test_sld.sld"
        self.maxDiff = None
        self.sut = create_single_dataset("single_point")
        create_dataset_data(self.sut.resourcebase_ptr_id)
        create_dataset_data(Dataset.objects.first().resourcebase_ptr_id)
        self.r = namedtuple('GSCatalogRes', ['resource'])
        site = AdminSite()
        self.admin = DatasetAdmin(Dataset, site)
        self.request_admin = RequestFactory().get('/admin')
        self.request_admin.user = get_user_model().objects.get(username='admin')

    # Admin Tests
    def test_admin_save_model(self):
        obj = Dataset.objects.first()
        self.assertEqual(len(obj.keywords.all()), 2)
        form = self.admin.get_form(self.request_admin, obj=obj, change=True)
        self.admin.save_model(self.request_admin, obj, form, True)

    def test_default_sourcetype(self):
        obj = Dataset.objects.first()
        self.assertEqual(obj.sourcetype, enumerations.SOURCE_TYPE_LOCAL)

    # Data Tests

    def test_describe_data_2(self):
        '''/data/geonode:CA/metadata -> Test accessing the description of a layer '''
        self.assertEqual(10, get_user_model().objects.all().count())
        response = self.client.get(reverse('dataset_metadata', args=('geonode:CA',)))
        # Since we are not authenticated, we should not be able to access it
        self.assertEqual(response.status_code, 302)
        # but if we log in ...
        self.client.login(username='admin', password='admin')
        # ... all should be good
        response = self.client.get(reverse('dataset_metadata', args=('geonode:CA',)))
        self.assertEqual(response.status_code, 200)

    def test_describe_data_3(self):
        '''/data/geonode:CA/metadata_detail -> Test accessing the description of a layer '''
        self.client.login(username='admin', password='admin')
        # ... all should be good
        response = self.client.get(reverse('dataset_metadata_detail', args=('geonode:CA',)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "Approved", count=1, status_code=200, msg_prefix='', html=False)
        self.assertContains(response, "Published", count=1, status_code=200, msg_prefix='', html=False)
        self.assertContains(response, "Featured", count=3, status_code=200, msg_prefix='', html=False)
        self.assertContains(response, "<dt>Group</dt>", count=0, status_code=200, msg_prefix='', html=False)
        # ... now assigning a Group to the Dataset
        lyr = Dataset.objects.get(alternate='geonode:CA')
        group = Group.objects.first()
        lyr.group = group
        lyr.save()
        response = self.client.get(reverse('dataset_metadata_detail', args=('geonode:CA',)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<dt>Group</dt>", count=1, status_code=200, msg_prefix='', html=False)
        lyr.group = None
        lyr.save()

    # Dataset Tests

    def test_dataset_name_clash(self):
        _ll_1 = Dataset.objects.create(
            uuid=str(uuid4()),
            owner=get_user_model().objects.get(username=self.user),
            name='states',
            store='geonode_data',
            subtype="vector",
            alternate="geonode:states"
        )
        _ll_2 = Dataset.objects.create(
            uuid=str(uuid4()),
            owner=get_user_model().objects.get(username=self.user),
            name='geonode:states',
            store='httpfooremoteservce',
            subtype="remote",
            alternate="geonode:states"
        )
        _ll_1.set_permissions({'users': {"bobby": ['base.view_resourcebase']}})
        _ll_2.set_permissions({'users': {"bobby": ['base.view_resourcebase']}})
        self.client.login(username="bobby", password="bob")
        _request = self.client.request()
        _request.user = get_user_model().objects.get(username="bobby")
        _ll = _resolve_dataset(_request, alternate="geonode:states")
        self.assertIsNotNone(_ll)
        self.assertEqual(_ll.name, _ll_1.name)

    def test_describe_data(self):
        '''/data/geonode:CA/metadata -> Test accessing the description of a layer '''
        self.assertEqual(10, get_user_model().objects.all().count())
        response = self.client.get(reverse('dataset_metadata', args=('geonode:CA',)))
        # Since we are not authenticated, we should not be able to access it
        self.assertEqual(response.status_code, 302)
        # but if we log in ...
        self.client.login(username='admin', password='admin')
        # ... all should be good
        response = self.client.get(reverse('dataset_metadata', args=('geonode:CA',)))
        self.assertEqual(response.status_code, 200)

    def test_dataset_attributes(self):
        lyr = Dataset.objects.all().first()
        # There should be a total of 4 attributes
        self.assertEqual(len(lyr.attribute_set.all()), 4)
        # 3 out of the 4 attributes should be visible
        custom_attributes = lyr.attribute_set.visible()
        self.assertEqual(len(custom_attributes), 3)
        # place_name should come before description
        self.assertEqual(custom_attributes[0].attribute_label, "Place Name")
        self.assertEqual(custom_attributes[1].attribute_label, "Description")
        self.assertEqual(
            custom_attributes[2].attribute,
            'N\xfamero_De_M\xe9dicos')
        # TODO: do test against layer with actual attribute statistics
        self.assertEqual(custom_attributes[1].count, 1)
        self.assertEqual(custom_attributes[1].min, "NA")
        self.assertEqual(custom_attributes[1].max, "NA")
        self.assertEqual(custom_attributes[1].average, "NA")
        self.assertEqual(custom_attributes[1].median, "NA")
        self.assertEqual(custom_attributes[1].stddev, "NA")
        self.assertEqual(custom_attributes[1].sum, "NA")
        self.assertEqual(custom_attributes[1].unique_values, "NA")

    def test_dataset_bbox(self):
        lyr = Dataset.objects.all().first()
        dataset_bbox = lyr.bbox[0:4]
        logger.debug(dataset_bbox)

        def decimal_encode(bbox):
            _bbox = [float(o) for o in bbox]
            # The input must be in the form [x0, x1, y0, y1]
            return [_bbox[0], _bbox[2], _bbox[1], _bbox[3]]

        from geonode.utils import bbox_to_projection
        projected_bbox = decimal_encode(
            bbox_to_projection([float(coord) for coord in dataset_bbox] + [lyr.srid, ],
                               target_srid=4326)[:4])
        logger.debug(projected_bbox)
        self.assertEqual(projected_bbox, [-180.0, -90.0, 180.0, 90.0])
        logger.debug(lyr.ll_bbox)
        self.assertEqual(lyr.ll_bbox, [-180.0, 180.0, -90.0, 90.0, 'EPSG:4326'])
        projected_bbox = decimal_encode(
            bbox_to_projection([float(coord) for coord in dataset_bbox] + [lyr.srid, ],
                               target_srid=3857)[:4])
        solution = [-20037397.023298454, -74299743.40065672,
                    20037397.02329845, 74299743.40061197]
        logger.debug(projected_bbox)
        for coord, check in zip(projected_bbox, solution):
            self.assertAlmostEqual(coord, check, places=3)

    def test_dataset_attributes_feature_catalogue(self):
        """ Test layer feature catalogue functionality
        """
        self.assertTrue(self.client.login(username='admin', password='admin'))
        # test a non-existing layer
        url = reverse('dataset_feature_catalogue', args=('bad_dataset',))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # Get the layer to work with
        layer = Dataset.objects.all()[3]
        url = reverse('dataset_feature_catalogue', args=(layer.alternate,))
        response = self.client.get(url)
        self.assertNotEqual(response.status_code, 404)

    def test_dataset_attribute_config(self):
        lyr = Dataset.objects.all().first()
        attribute_config = lyr.attribute_config()
        custom_attributes = attribute_config["getFeatureInfo"]
        self.assertEqual(
            custom_attributes["fields"], [
                "place_name", "description", 'N\xfamero_De_M\xe9dicos'])
        self.assertEqual(
            custom_attributes["propertyNames"]["description"],
            "Description")
        self.assertEqual(
            custom_attributes["propertyNames"]["place_name"],
            "Place Name")
        attributes = Attribute.objects.filter(dataset=lyr)
        for _att in attributes:
            self.assertEqual(_att.featureinfo_type, 'type_property')
        lyr.featureinfo_custom_template = "<h1>Test HTML</h1>"
        lyr.use_featureinfo_custom_template = True
        lyr.save()
        attribute_config = lyr.attribute_config()
        self.assertTrue("ftInfoTemplate" in attribute_config)
        self.assertEqual(
            attribute_config["ftInfoTemplate"],
            "<h1>Test HTML</h1>")
        lyr.use_featureinfo_custom_template = False
        lyr.save()
        attribute_config = lyr.attribute_config()
        self.assertTrue("ftInfoTemplate" not in attribute_config)

    def test_dataset_styles(self):
        lyr = Dataset.objects.all().first()
        # There should be a total of 4 styles
        self.assertEqual(len(lyr.styles.all()), 4)
        # One of the styles is the default one
        self.assertEqual(
            lyr.default_style,
            Style.objects.get(
                id=lyr.default_style.id))
        try:
            [str(style) for style in lyr.styles.all()]
        except UnicodeEncodeError:
            self.fail(
                "str of the Style model throws a UnicodeEncodeError with special characters.")
    def test_dataset_links(self):
        lyr = Dataset.objects.filter(subtype="vector").first()
        self.assertEqual(lyr.subtype, "vector")
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="metadata")
            self.assertIsNotNone(links)
            for ll in links:
                self.assertEqual(ll.link_type, "metadata")
            _def_link_types = (
                'data', 'image', 'original', 'html', 'OGC:WMS', 'OGC:WFS', 'OGC:WCS')
            Link.objects.filter(resource=lyr.resourcebase_ptr, link_type__in=_def_link_types).delete()
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="data")
            self.assertIsNotNone(links)
            set_resource_default_links(lyr, lyr)
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="metadata")
            self.assertIsNotNone(links)
            for ll in links:
                self.assertEqual(ll.link_type, "metadata")
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="data")
            self.assertIsNotNone(links)
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="image")
            self.assertIsNotNone(links)

        lyr = Dataset.objects.filter(subtype="raster").first()
        self.assertEqual(lyr.subtype, "raster")
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="metadata")
            self.assertIsNotNone(links)
            for ll in links:
                self.assertEqual(ll.link_type, "metadata")
            _def_link_types = (
                'data', 'image', 'original', 'html', 'OGC:WMS', 'OGC:WFS', 'OGC:WCS')
            Link.objects.filter(resource=lyr.resourcebase_ptr, link_type__in=_def_link_types).delete()
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="data")
            self.assertIsNotNone(links)
            set_resource_default_links(lyr, lyr)
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="metadata")
            self.assertIsNotNone(links)
            for ll in links:
                self.assertEqual(ll.link_type, "metadata")
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="data")
            self.assertIsNotNone(links)
            links = Link.objects.filter(resource=lyr.resourcebase_ptr, link_type="image")
            self.assertIsNotNone(links)

    def test_get_valid_user(self):
        # Verify it accepts an admin user
        adminuser = get_user_model().objects.get(is_superuser=True)
        valid_user = get_valid_user(adminuser)
        msg = (f'Passed in a valid admin user "{adminuser}" but got "{valid_user}" in return')
        assert valid_user.id == adminuser.id, msg
        # Verify it returns a valid user after receiving None
        valid_user = get_valid_user(None)
        msg = f'Expected valid user after passing None, got "{valid_user}"'
        assert isinstance(valid_user, get_user_model()), msg
        newuser = get_user_model().objects.create(username='arieluser')
        valid_user = get_valid_user(newuser)
        msg = (f'Passed in a valid user "{newuser}" but got "{valid_user}" in return')
        assert valid_user.id == newuser.id, msg
        valid_user = get_valid_user('arieluser')
        msg = ('Passed in a valid user by username "arieluser" but got'
               f' "{valid_user}" in return')
        assert valid_user.username == 'arieluser', msg
        nn = get_anonymous_user()
        self.assertRaises(GeoNodeException, get_valid_user, nn)

    def testShapefileValidation(self):
        files = dict(
            base_file=SimpleUploadedFile('foo.shp', b' '),
            shx_file=SimpleUploadedFile('foo.shx', b' '),
            dbf_file=SimpleUploadedFile('foo.dbf', b' '),
            prj_file=SimpleUploadedFile('foo.prj', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(
            base_file=SimpleUploadedFile('foo.SHP', b' '),
            shx_file=SimpleUploadedFile('foo.SHX', b' '),
            dbf_file=SimpleUploadedFile('foo.DBF', b' '),
            prj_file=SimpleUploadedFile('foo.PRJ', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(
            base_file=SimpleUploadedFile('foo.SHP', b' '),
            shx_file=SimpleUploadedFile('foo.shx', b' '),
            dbf_file=SimpleUploadedFile('foo.dbf', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(
            base_file=SimpleUploadedFile('foo.SHP', b' '),
            shx_file=SimpleUploadedFile('foo.shx', b' '),
            dbf_file=SimpleUploadedFile('foo.dbf', b' '),
            prj_file=SimpleUploadedFile('foo.PRJ', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(
            base_file=SimpleUploadedFile('foo.SHP', b' '),
            shx_file=SimpleUploadedFile('bar.shx', b' '),
            dbf_file=SimpleUploadedFile('bar.dbf', b' '),
            prj_file=SimpleUploadedFile('bar.PRJ', b' '))
        self.assertFalse(LayerUploadForm(dict(), files).is_valid())
        files = dict(
            base_file=SimpleUploadedFile('foo.shp', b' '),
            dbf_file=SimpleUploadedFile('foo.dbf', b' '),
            prj_file=SimpleUploadedFile('foo.PRJ', b' '))
        self.assertFalse(LayerUploadForm(dict(), files).is_valid())
        files = dict(
            base_file=SimpleUploadedFile('foo.txt', b' '),
            shx_file=SimpleUploadedFile('foo.shx', b' '),
            dbf_file=SimpleUploadedFile('foo.sld', b' '),
            prj_file=SimpleUploadedFile('foo.prj', b' '))
        self.assertFalse(LayerUploadForm(dict(), files).is_valid())

    def testGeoTiffValidation(self):
        files = dict(base_file=SimpleUploadedFile('foo.tif', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.TIF', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.tiff', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.TIF', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.geotif', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.GEOTIF', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.geotiff', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.GEOTIF', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())

    def testASCIIValidation(self):
        files = dict(base_file=SimpleUploadedFile('foo.asc', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        files = dict(base_file=SimpleUploadedFile('foo.ASC', b' '))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())

    def testZipValidation(self):
        the_zip = zipfile.ZipFile('test_upload.zip', 'w')
        in_memory_file = io.StringIO()
        in_memory_file.write('test')
        the_zip.writestr('foo.shp', in_memory_file.getvalue())
        the_zip.writestr('foo.dbf', in_memory_file.getvalue())
        the_zip.writestr('foo.shx', in_memory_file.getvalue())
        the_zip.writestr('foo.prj', in_memory_file.getvalue())
        the_zip.close()
        files = dict(base_file=SimpleUploadedFile('test_upload.zip',
                                                  open('test_upload.zip', mode='rb').read()))
        self.assertTrue(LayerUploadForm(dict(), files).is_valid())
        os.remove('test_upload.zip')

    def testWriteFiles(self):
        files = dict(
            base_file=SimpleUploadedFile('foo.shp', b' '),
            shx_file=SimpleUploadedFile('foo.shx', b' '),
            dbf_file=SimpleUploadedFile('foo.dbf', b' '),
            prj_file=SimpleUploadedFile('foo.prj', b' '))
        form = LayerUploadForm(dict(), files)
        self.assertTrue(form.is_valid())
        tempdir = form.write_files()[0]
        self.assertEqual(set(os.listdir(tempdir)),
                         {'foo.shp', 'foo.shx', 'foo.dbf', 'foo.prj'})
        the_zip = zipfile.ZipFile('test_upload.zip', 'w')
        in_memory_file = io.StringIO()
        in_memory_file.write('test')
        the_zip.writestr('foo.shp', in_memory_file.getvalue())
        the_zip.writestr('foo.dbf', in_memory_file.getvalue())
        the_zip.writestr('foo.shx', in_memory_file.getvalue())
        the_zip.writestr('foo.prj', in_memory_file.getvalue())
        the_zip.close()
        files = dict(base_file=SimpleUploadedFile('test_upload.zip',
                                                  open('test_upload.zip', mode='rb').read()))
        form = LayerUploadForm(dict(), files)
        self.assertTrue(form.is_valid())
        tempdir = form.write_files()[0]
        self.assertEqual(set(os.listdir(tempdir)),
                         {'foo.shp', 'foo.shx', 'foo.dbf', 'foo.prj'})
        os.remove('test_upload.zip')

    def test_dataset_type(self):
        self.assertEqual(dataset_type('foo.shp'), 'vector')
        self.assertEqual(dataset_type('foo.SHP'), 'vector')
        self.assertEqual(dataset_type('foo.sHp'), 'vector')
        self.assertEqual(dataset_type('foo.tif'), 'raster')
        self.assertEqual(dataset_type('foo.TIF'), 'raster')
        self.assertEqual(dataset_type('foo.TiF'), 'raster')
        self.assertEqual(dataset_type('foo.geotif'), 'raster')
        self.assertEqual(dataset_type('foo.GEOTIF'), 'raster')
        self.assertEqual(dataset_type('foo.gEoTiF'), 'raster')
        self.assertEqual(dataset_type('foo.tiff'), 'raster')
        self.assertEqual(dataset_type('foo.TIFF'), 'raster')
        self.assertEqual(dataset_type('foo.TiFf'), 'raster')
        self.assertEqual(dataset_type('foo.geotiff'), 'raster')
        self.assertEqual(dataset_type('foo.GEOTIFF'), 'raster')
        self.assertEqual(dataset_type('foo.gEoTiFf'), 'raster')
        self.assertEqual(dataset_type('foo.asc'), 'raster')
        self.assertEqual(dataset_type('foo.ASC'), 'raster')
        self.assertEqual(dataset_type('foo.AsC'), 'raster')
        # basically anything else should produce a GeoNodeException
        self.assertRaises(GeoNodeException, lambda: dataset_type('foo.gml'))
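
    # get_files() gathers the sidecar files that accompany a dataset upload;
    # the nested helper below creates empty shapefile components in a
    # temporary directory so the lookup logic can be exercised on its own.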
    def test_get_files(self):

        def generate_files(*extensions):
            if extensions[0].lower() != 'shp':
                return
            d = None
            expected_files = None
            try:
                d = mkdtemp()
                fnames = [f"foo.{ext}" for ext in extensions]
                expected_files = {ext.lower(): fname for ext, fname in zip(extensions, fnames)}
                for f in fnames:
                    path = os.path.join(d, f)
                    # open and immediately close to create empty file
                    open(path, 'w').close()
            finally:
                return d, expected_files

        # Check that a well-formed Shapefile has its components all picked up
        d = None
        _tmpdir = None
        try:
            d, expected_files = generate_files("shp", "shx", "prj", "dbf")
            gotten_files, _tmpdir = get_files(os.path.join(d, "foo.shp"))
            gotten_files = {k: os.path.basename(v) for k, v in gotten_files.items()}
            self.assertEqual(gotten_files, expected_files)
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)
            if _tmpdir is not None:
                shutil.rmtree(_tmpdir, ignore_errors=True)

        # Check that a Shapefile missing required components raises an
        # exception
        d = None
        try:
            d, expected_files = generate_files("shp", "shx", "prj")
            self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)

        # Check that including an SLD with a valid shapefile results in the SLD
        # getting picked up
        d = None
        _tmpdir = None
        try:
            if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                d, expected_files = generate_files("shp", "shx", "prj", "dbf", "sld")
                gotten_files, _tmpdir = get_files(os.path.join(d, "foo.shp"))
                gotten_files = {k: os.path.basename(v) for k, v in gotten_files.items()}
                self.assertEqual(gotten_files, expected_files)
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)
            if _tmpdir is not None:
                shutil.rmtree(_tmpdir, ignore_errors=True)

        # Check that capitalized extensions are ok
        d = None
        _tmpdir = None
        try:
            d, expected_files = generate_files("SHP", "SHX", "PRJ", "DBF")
            gotten_files, _tmpdir = get_files(os.path.join(d, "foo.SHP"))
            gotten_files = {k: os.path.basename(v) for k, v in gotten_files.items()}
            self.assertEqual(gotten_files, expected_files)
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)
            if _tmpdir is not None:
                shutil.rmtree(_tmpdir, ignore_errors=True)

        # Check that mixed capital and lowercase extensions are ok
        d = None
        _tmpdir = None
        try:
            d, expected_files = generate_files("SHP", "shx", "pRJ", "DBF")
            gotten_files, _tmpdir = get_files(os.path.join(d, "foo.SHP"))
            gotten_files = {k: os.path.basename(v) for k, v in gotten_files.items()}
            self.assertEqual(gotten_files, expected_files)
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)
            if _tmpdir is not None:
                shutil.rmtree(_tmpdir, ignore_errors=True)

        # Check that including both capital and lowercase extensions raises an
        # exception
        d = None
        try:
            d, expected_files = generate_files("SHP", "SHX", "PRJ", "DBF", "shp", "shx", "prj", "dbf")
            # Only run the tests if this is a case sensitive OS
            if len(os.listdir(d)) == len(expected_files):
                self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.SHP")))
                self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)

        # Check that including both capital and lowercase PRJ (this is
        # special-cased in the implementation)
        d = None
        try:
            d, expected_files = generate_files("SHP", "SHX", "PRJ", "DBF", "prj")
            # Only run the tests if this is a case sensitive OS
            if len(os.listdir(d)) == len(expected_files):
                self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.SHP")))
                self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)

        # Check that including both capital and lowercase SLD (this is
        # special-cased in the implementation)
        d = None
        try:
            if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                d, expected_files = generate_files("SHP", "SHX", "PRJ", "DBF", "SLD", "sld")
                # Only run the tests if this is a case sensitive OS
                if len(os.listdir(d)) == len(expected_files):
                    self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.SHP")))
                    self.assertRaises(GeoNodeException, lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            if d is not None:
                shutil.rmtree(d, ignore_errors=True)

    def test_get_valid_name(self):
        self.assertEqual(get_valid_name("blug"), "blug")
        self.assertEqual(get_valid_name("<-->"), "_")
        self.assertEqual(get_valid_name("<ab>"), "_ab_")
        self.assertNotEqual(get_valid_name("CA"), "CA_1")
        self.assertNotEqual(get_valid_name("CA"), "CA_1")

    def test_get_valid_dataset_name(self):
        self.assertEqual(get_valid_dataset_name("blug", False), "blug")
        self.assertEqual(get_valid_dataset_name("blug", True), "blug")
        self.assertEqual(get_valid_dataset_name("<ab>", False), "_ab_")
        self.assertEqual(get_valid_dataset_name("<ab>", True), "<ab>")
        self.assertEqual(get_valid_dataset_name("<-->", False), "_")
        self.assertEqual(get_valid_dataset_name("<-->", True), "<-->")
        self.assertNotEqual(get_valid_dataset_name("CA", False), "CA_1")
        self.assertNotEqual(get_valid_dataset_name("CA", False), "CA_1")
        self.assertEqual(get_valid_dataset_name("CA", True), "CA")
        self.assertEqual(get_valid_dataset_name("CA", True), "CA")
        layer = Dataset.objects.get(name="CA")
        self.assertNotEqual(get_valid_dataset_name(layer, False), "CA_1")
        self.assertEqual(get_valid_dataset_name(layer, True), "CA")
        self.assertRaises(GeoNodeException, get_valid_dataset_name, 12, False)
        self.assertRaises(GeoNodeException, get_valid_dataset_name, 12, True)

    # NOTE: we don't care about file content for many of these tests (the
    # forms under test validate based only on file name, and leave actual
    # content inspection to GeoServer) but Django's form validation will omit
    # any files with empty bodies.
    #
    # That is, this leads to mysterious test failures:
    #     SimpleUploadedFile('foo', ''.encode("UTF-8"))
    #
    # And this should be used instead to avoid that:
    #     SimpleUploadedFile('foo', ' '.encode("UTF-8"))

    def testJSONField(self):
        field = JSONField()
        # a valid JSON document should pass
        field.clean('{ "users": [] }')
        # text which is not JSON should fail
        self.assertRaises(
            ValidationError,
            lambda: field.clean('<users></users>'))

    def test_rating_dataset_remove(self):
        """ Test layer rating is removed on layer remove
        """
        # Get the layer to work with
        layer = Dataset.objects.all()[3]
        dataset_id = layer.id
        # Create the rating with the correct content type
        ctype = ContentType.objects.get(model='dataset')
        OverallRating.objects.create(
            category=2,
            object_id=dataset_id,
            content_type=ctype,
            rating=3)
        rating = OverallRating.objects.all()
        self.assertEqual(rating.count(), 1)
        # Remove the layer
        resource_manager.delete(layer.uuid)
        # Check there are no ratings matching the removed layer
        rating = OverallRating.objects.all()
        self.assertEqual(rating.count(), 0)

    def test_sld_upload(self):
        """Test layer SLD upload functionality
        """
        layer = Dataset.objects.all().first()
        url = reverse('dataset_sld_upload', args=(layer.alternate,))
        # Now test with a valid user
        self.client.login(username='admin', password='admin')
        # test a method other than POST and GET
        response = self.client.put(url)
        content = response.content.decode('utf-8')
        self.assertEqual(response.status_code, 200)
        self.assertFalse("#modal_perms" in content)

    def test_category_counts(self):
        topics = TopicCategory.objects.all()
        topics = topics.annotate(
            **{'dataset_count': Count('resourcebase__dataset__category')})
        location = topics.get(identifier='location')
        # there are three layers with location category
        self.assertEqual(location.dataset_count, 3)
        # change the category of one of the layers
        layer = Dataset.objects.filter(category=location)[0]
        elevation = topics.get(identifier='elevation')
        layer.category = elevation
        layer.save()
        # reload the categories since it's caching the old count
        topics = topics.annotate(
            **{'dataset_count': Count('resourcebase__dataset__category')})
        location = topics.get(identifier='location')
        elevation = topics.get(identifier='elevation')
        self.assertEqual(location.dataset_count, 2)
        self.assertEqual(elevation.dataset_count, 4)
        # delete a layer and check the count update
        # use the first since it's the only one which has styles
        layer = Dataset.objects.all().first()
        elevation = topics.get(identifier='elevation')
        self.assertEqual(elevation.dataset_count, 4)
        layer.delete()
        topics = topics.annotate(
            **{'dataset_count': Count('resourcebase__dataset__category')})
        elevation = topics.get(identifier='elevation')
        self.assertEqual(elevation.dataset_count, 3)

    def test_assign_change_dataset_data_perm(self):
        """
        Ensure set_permissions supports the change_dataset_data permission.
        """
        layer = Dataset.objects.first()
        user = get_anonymous_user()
        layer.set_permissions({'users': {user.username: ['change_dataset_data']}})
        perms = layer.get_all_level_info()
        self.assertNotIn(user, perms['users'])
        self.assertNotIn(user.username, perms['users'])
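
    # The batch-metadata view accepts a comma-separated list of primary keys
    # ("ids") plus the field to change, and applies the change to every
    # selected resource; non-admin users are expected to be rejected.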
    def test_batch_edit(self):
        """
        Test batch editing of metadata fields.
        """
        Model = Dataset
        view = 'dataset_batch_metadata'
        resources = Model.objects.all()[:3]
        ids = ','.join(str(element.pk) for element in resources)
        # test non-admin access
        self.client.login(username="bobby", password="bob")
        response = self.client.get(reverse(view))
        self.assertTrue(response.status_code in (401, 403))
        # test group change
        group = Group.objects.first()
        self.client.login(username='admin', password='admin')
        response = self.client.post(
            reverse(view),
            data={'group': group.pk, 'ids': ids, 'regions': 1},
        )
        self.assertEqual(response.status_code, 302)
        resources = Model.objects.filter(id__in=[r.pk for r in resources])
        for resource in resources:
            self.assertEqual(resource.group, group)
        # test owner change
        owner = get_user_model().objects.first()
        response = self.client.post(
            reverse(view),
            data={'owner': owner.pk, 'ids': ids, 'regions': 1},
        )
        self.assertEqual(response.status_code, 302)
        resources = Model.objects.filter(id__in=[r.pk for r in resources])
        for resource in resources:
            self.assertEqual(resource.owner, owner)
        # test license change
        license = License.objects.first()
        response = self.client.post(
            reverse(view),
            data={'license': license.pk, 'ids': ids, 'regions': 1},
        )
        self.assertEqual(response.status_code, 302)
        resources = Model.objects.filter(id__in=[r.pk for r in resources])
        for resource in resources:
            self.assertEqual(resource.license, license)
        # test regions change
        region = Region.objects.first()
        response = self.client.post(
            reverse(view),
            data={'region': region.pk, 'ids': ids, 'regions': 1},
        )
        self.assertEqual(response.status_code, 302)
        resources = Model.objects.filter(id__in=[r.pk for r in resources])
        for resource in resources:
            if resource.regions.all():
                self.assertTrue(region in resource.regions.all())
        # test language change
        language = 'eng'
        response = self.client.post(
            reverse(view),
            data={'language': language, 'ids': ids, 'regions': 1},
        )
        resources = Model.objects.filter(id__in=[r.pk for r in resources])
        for resource in resources:
            self.assertEqual(resource.language, language)
        # test keywords change
        keywords = 'some,thing,new'
        response = self.client.post(
            reverse(view),
            data={'keywords': keywords, 'ids': ids, 'regions': 1},
        )
        resources = Model.objects.filter(id__in=[r.pk for r in resources])
        for resource in resources:
            for word in resource.keywords.all():
                self.assertTrue(word.name in keywords.split(','))
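
    # "\udcc3\udcab" are the lone surrogates produced by decoding the raw
    # bytes 0xC3 0xAB with the "surrogateescape" error handler; re-decoding
    # them as UTF-8 should yield the character "ë".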
    def test_surrogate_escape_string(self):
        surrogate_escape_raw = "Zo\udcc3\udcab"
        surrogate_escape_expected = "Zoë"
        surrogate_escape_result = surrogate_escape_string(
            surrogate_escape_raw, 'UTF-8')  # add more test cases using different charsets?
        self.assertEqual(
            surrogate_escape_result,
            surrogate_escape_expected,
            "layers.utils.surrogate_escape_string did not produce expected result. "
            f"Expected {surrogate_escape_expected}, received {surrogate_escape_result}")

    @on_ogc_backend(geoserver.BACKEND_PACKAGE)
    def test_assign_remove_permissions(self):
        # Assign
        layer = Dataset.objects.all().first()
        perm_spec = layer.get_all_level_info()
        self.assertNotIn(get_user_model().objects.get(username="norman"), perm_spec["users"])
        utils.set_datasets_permissions("write", resources_names=[layer.name], users_usernames=["norman"], delete_flag=False, verbose=True)
        perm_spec = layer.get_all_level_info()
        _c = 0
        if "users" in perm_spec:
            for _u in perm_spec["users"]:
                if _u == "norman" or _u == get_user_model().objects.get(username="norman"):
                    _c += 1
        # "norman" has both read & write permissions
        self.assertEqual(_c, 1)
        # Remove
        utils.set_datasets_permissions("read", resources_names=[layer.name], users_usernames=["norman"], delete_flag=True, verbose=True)
        perm_spec = layer.get_all_level_info()
        _c = 0
        if "users" in perm_spec:
            for _u in perm_spec["users"]:
                if _u == "norman" or _u == get_user_model().objects.get(username="norman"):
                    _c += 1
        # "norman" has no permissions
        self.assertEqual(_c, 0)

    def test_xml_form_without_files_should_raise_500(self):
        files = dict()
        files['permissions'] = '{}'
        files['charset'] = 'utf-8'
        self.client.login(username="admin", password="admin")
        resp = self.client.post(reverse('dataset_upload'), data=files)
        self.assertEqual(500, resp.status_code)

    def test_xml_should_return_404_if_the_dataset_does_not_exists(self):
        params = {
            "permissions": '{ "users": {"AnonymousUser": ["view_resourcebase"]} , "groups":{}}',
            "base_file": open(self.exml_path),
            "xml_file": open(self.exml_path),
            "dataset_title": "Fake layer title",
            "metadata_upload_form": True,
            "time": False,
            "charset": "UTF-8"
        }
        self.client.login(username="admin", password="admin")
        resp = self.client.post(reverse('dataset_upload'), params)
        self.assertEqual(404, resp.status_code)

    def test_xml_should_update_the_dataset_with_the_expected_values(self):
        params = {
            "permissions": '{ "users": {"AnonymousUser": ["view_resourcebase"]} , "groups":{}}',
            "base_file": open(self.exml_path),
            "xml_file": open(self.exml_path),
            "dataset_title": "geonode:single_point",
            "metadata_upload_form": True,
            "time": False,
            "charset": "UTF-8"
        }
        self.client.login(username="admin", password="admin")
        prev_dataset = Dataset.objects.get(typename="geonode:single_point")
        self.assertEqual(0, prev_dataset.keywords.count())
        resp = self.client.post(reverse('dataset_upload'), params)
        self.assertEqual(404, resp.status_code)
        self.assertEqual(resp.json()["errors"], "The UUID identifier from the XML Metadata, is different from the one saved")

    def test_sld_should_raise_500_if_is_invalid(self):
        layer = Dataset.objects.get(typename="geonode:single_point")
        params = {
            "permissions": '{ "users": {"AnonymousUser": ["view_resourcebase"]} , "groups":{}}',
            "base_file": open(self.sld_path),
            "sld_file": open(self.sld_path),
            "dataset_title": "random",
            "metadata_upload_form": False,
            "time": False,
            "charset": "UTF-8"
        }
        self.client.login(username="admin", password="admin")
        self.assertGreaterEqual(layer.styles.count(), 1)
        self.assertIsNotNone(layer.styles.first())
        resp = self.client.post(reverse('dataset_upload'), params)
        self.assertEqual(500, resp.status_code)
        self.assertFalse(resp.json().get('success'))
        self.assertEqual('No Dataset matches the given query.', resp.json().get('errors'))

    def test_sld_should_update_the_dataset_with_the_expected_values(self):
        layer = Dataset.objects.get(typename="geonode:single_point")
        params = {
            "permissions": '{ "users": {"AnonymousUser": ["view_resourcebase"]} , "groups":{}}',
            "base_file": open(self.sld_path),
            "sld_file": open(self.sld_path),
            "dataset_title": f"geonode:{layer.name}",
            "metadata_upload_form": False,
            "time": False,
            "charset": "UTF-8"
        }
        self.client.login(username="admin", password="admin")
        self.assertGreaterEqual(layer.styles.count(), 1)
        self.assertIsNotNone(layer.styles.first())
        resp = self.client.post(reverse('dataset_upload'), params)
        self.assertEqual(200, resp.status_code)
        updated_dataset = Dataset.objects.get(alternate=f"geonode:{layer.name}")
        # just checking if some values are updated
        self.assertGreaterEqual(updated_dataset.styles.all().count(), 1)
        self.assertIsNotNone(updated_dataset.styles.first())
        self.assertEqual(layer.styles.first().sld_title, updated_dataset.styles.first().sld_title)

    def test_xml_should_raise_an_error_if_the_uuid_is_changed(self):
        '''
        If the UUID coming from the XML differs from the one saved in the DB,
        the system should raise an error.
        '''
        params = {
            "permissions": '{ "users": {"AnonymousUser": ["view_resourcebase"]} , "groups":{}}',
            "base_file": open(self.exml_path),
            "xml_file": open(self.exml_path),
            "dataset_title": "geonode:single_point",
            "metadata_upload_form": True,
            "time": False,
            "charset": "UTF-8"
        }
        self.client.login(username="admin", password="admin")
        prev_dataset = Dataset.objects.get(typename="geonode:single_point")
        self.assertEqual(0, prev_dataset.keywords.count())
        resp = self.client.post(reverse('dataset_upload'), params)
        self.assertEqual(404, resp.status_code)
        expected = {
            "success": False,
            "errors": "The UUID identifier from the XML Metadata, is different from the one saved"
        }
        self.assertDictEqual(expected, resp.json())
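
    # The validate_input_source() tests below feed deliberately mismatched
    # file sets to the "append" workflow and assert on the error messages
    # raised for each failure mode.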
    def test_will_raise_exception_for_replace_vector_dataset_with_raster(self):
        layer = Dataset.objects.get(name="single_point")
        filename = "/tpm/filename.tif"
        files = ["/opt/file1.shp", "/opt/file2.ccc"]
        with self.assertRaises(Exception) as e:
            validate_input_source(layer, filename, files, action_type="append")
        expected = "You are attempting to append a vector dataset with a raster."
        self.assertEqual(expected, e.exception.args[0])

    def test_will_raise_exception_for_replace_dataset_with_unknown_format(self):
        layer = Dataset.objects.get(name="single_point")
        filename = "/tpm/filename.ccc"
        file_path = gisdata.VECTOR_DATA
        files = {
            "shp": filename,
            "dbf": f"{file_path}/san_andres_y_providencia_highway.asd",
            "prj": f"{file_path}/san_andres_y_providencia_highway.asd",
            "shx": f"{file_path}/san_andres_y_providencia_highway.asd",
        }
        with self.assertRaises(Exception) as e:
            validate_input_source(layer, filename, files, action_type="append")
        expected = "You are attempting to append a vector dataset with an unknown format."
        self.assertEqual(expected, e.exception.args[0])

    def test_will_raise_exception_for_replace_dataset_with_different_file_name(self):
        layer = Dataset.objects.get(name="single_point")
        file_path = gisdata.VECTOR_DATA
        filename = os.path.join(file_path, "san_andres_y_providencia_highway.shp")
        files = {
            "shp": filename,
            "dbf": f"{file_path}/san_andres_y_providencia_highway.sbf",
            "prj": f"{file_path}/san_andres_y_providencia_highway.prj",
            "shx": f"{file_path}/san_andres_y_providencia_highway.shx",
        }
        with self.assertRaises(Exception) as e:
            validate_input_source(layer, filename, files, action_type="append")
        expected = (
            "Some error occurred while trying to access the uploaded schema: "
            "Please ensure the name is consistent with the file you are trying to append."
        )
        self.assertEqual(expected, e.exception.args[0])

    @patch("geonode.layers.utils.gs_catalog")
    def test_will_raise_exception_for_not_existing_dataset_in_the_catalog(self, catalog):
        catalog.get_layer.return_value = None
        create_single_dataset("san_andres_y_providencia_water")
        layer = Dataset.objects.get(name="san_andres_y_providencia_water")
        file_path = gisdata.VECTOR_DATA
        filename = os.path.join(file_path, "san_andres_y_providencia_water.shp")
        files = {
            "shp": filename,
            "dbf": f"{file_path}/san_andres_y_providencia_water.sbf",
            "prj": f"{file_path}/san_andres_y_providencia_water.prj",
            "shx": f"{file_path}/san_andres_y_providencia_water.shx",
        }
        with self.assertRaises(Exception) as e:
            validate_input_source(layer, filename, files, action_type="append")
        expected = (
            "Some error occurred while trying to access the uploaded schema: "
            "The selected Dataset does not exists in the catalog."
        )
        self.assertEqual(expected, e.exception.args[0])

    @patch("geonode.layers.utils.gs_catalog")
    def test_will_raise_exception_if_schema_is_not_equal_between_catalog_and_file(self, catalog):
        attr = namedtuple('GSCatalogAttr', ['attributes'])
        attr.attributes = []
        self.r.resource = attr
        catalog.get_layer.return_value = self.r
        create_single_dataset("san_andres_y_providencia_water")
        layer = Dataset.objects.filter(name="san_andres_y_providencia_water")[0]
        file_path = gisdata.VECTOR_DATA
        filename = os.path.join(file_path, "san_andres_y_providencia_water.shp")
        files = {
            "shp": filename,
            "dbf": f"{file_path}/san_andres_y_providencia_water.sbf",
            "prj": f"{file_path}/san_andres_y_providencia_water.prj",
            "shx": f"{file_path}/san_andres_y_providencia_water.shx",
        }
        with self.assertRaises(Exception) as e:
            validate_input_source(layer, filename, files, action_type="append")
        expected = (
            "Some error occurred while trying to access the uploaded schema: "
            "Please ensure that the dataset structure is consistent with the file you are trying to append."
        )
        self.assertEqual(expected, e.exception.args[0])