[Fixes #4025] Regression when uploading a shapefile with non-ASCII characters #4026

Merged: 6 commits, merged on Oct 31, 2018
8 changes: 4 additions & 4 deletions geonode/base/models.py
@@ -799,7 +799,7 @@ def csw_crs(self):
@property
def group_name(self):
if self.group:
- return str(self.group)
+ return str(self.group).encode("utf-8", "replace")
return None

@property
@@ -902,13 +902,13 @@ def metadata_completeness(self):
return '{}%'.format(len(filled_fields) * 100 / len(required_fields))

def keyword_list(self):
- return [kw.name for kw in self.keywords.all()]
+ return [kw.name.encode("utf-8", "replace") for kw in self.keywords.all()]

def keyword_slug_list(self):
- return [kw.slug for kw in self.keywords.all()]
+ return [kw.slug.encode("utf-8", "replace") for kw in self.keywords.all()]

def region_name_list(self):
- return [region.name for region in self.regions.all()]
+ return [region.name.encode("utf-8", "replace") for region in self.regions.all()]

def spatial_representation_type_string(self):
if hasattr(self.spatial_representation_type, 'identifier'):
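The changes above rely on Python 2's `"replace"` error handler, which substitutes problem characters instead of raising, so group, keyword, slug and region names containing non-ASCII text can always be encoded. A minimal sketch of that behaviour, with hypothetical values that are not part of this patch:

```python
# Hypothetical keyword names as they might come from a Chinese shapefile (Python 2).
keywords = [u'\u6d59\u6c5f', u'population']   # a Chinese keyword and an ASCII one

# encode(..., "replace") returns UTF-8 byte strings and never raises
# UnicodeEncodeError: unencodable code points become the replacement character.
encoded = [kw.encode("utf-8", "replace") for kw in keywords]
print(encoded)   # ['\xe6\xb5\x99\xe6\xb1\x9f', 'population']
```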
47 changes: 31 additions & 16 deletions geonode/geoserver/signals.py
@@ -243,14 +243,17 @@ def geoserver_post_save_local(instance, *args, **kwargs):
instance.workspace = gs_resource.store.workspace.name
instance.store = gs_resource.store.name

- bbox = gs_resource.native_bbox
-
- # Set bounding box values
- instance.bbox_x0 = bbox[0]
- instance.bbox_x1 = bbox[1]
- instance.bbox_y0 = bbox[2]
- instance.bbox_y1 = bbox[3]
- instance.srid = bbox[4]
+ try:
+     bbox = gs_resource.native_bbox
+
+     # Set bounding box values
+     instance.bbox_x0 = bbox[0]
+     instance.bbox_x1 = bbox[1]
+     instance.bbox_y0 = bbox[2]
+     instance.bbox_y1 = bbox[3]
+     instance.srid = bbox[4]
+ except BaseException:
+     pass

if instance.srid:
instance.srid_url = "http://www.spatialreference.org/ref/" + \
@@ -271,14 +274,17 @@ def geoserver_post_save_local(instance, *args, **kwargs):
gs_catalog.save(gs_resource)

if not settings.FREETEXT_KEYWORDS_READONLY:
- if len(instance.keyword_list()) == 0 and gs_resource.keywords:
-     for keyword in gs_resource.keywords:
-         if keyword not in instance.keyword_list():
-             instance.keywords.add(keyword)
+ try:
+     if len(instance.keyword_list()) == 0 and gs_resource.keywords:
+         for keyword in gs_resource.keywords:
+             if keyword not in instance.keyword_list():
+                 instance.keywords.add(keyword)
+ except BaseException:
+     pass

if any(instance.keyword_list()):
keywords = instance.keyword_list()
- gs_resource.keywords = list(set(keywords))
+ gs_resource.keywords = [kw.decode("utf-8", "replace") for kw in list(set(keywords))]

# gs_resource should only be called if
# ogc_server_settings.BACKEND_WRITE_ENABLED == True
@@ -321,7 +327,10 @@ def geoserver_post_save_local(instance, *args, **kwargs):
# store the resource to avoid another geoserver call in the post_save
instance.gs_resource = gs_resource

- bbox = gs_resource.native_bbox
+ try:
+     bbox = gs_resource.native_bbox
+ except BaseException:
+     bbox = instance.bbox
dx = float(bbox[1]) - float(bbox[0])
dy = float(bbox[3]) - float(bbox[2])

@@ -337,7 +346,10 @@ def geoserver_post_save_local(instance, *args, **kwargs):
instance.bbox_x1, instance.bbox_y1])

# Create Raw Data download link
- path = gs_resource.dom.findall('nativeName')
+ try:
+     path = gs_resource.dom.findall('nativeName')
+ except BaseException:
+     path = instance.alternate
download_url = urljoin(settings.SITEURL,
reverse('download', args=[instance.id]))
Link.objects.get_or_create(resource=instance.resourcebase_ptr,
@@ -397,7 +409,10 @@ def geoserver_post_save_local(instance, *args, **kwargs):
url=ogc_server_settings.public_url,
repo_name=geogig_repo_name)

- path = gs_resource.dom.findall('nativeName')
+ try:
+     path = gs_resource.dom.findall('nativeName')
+ except BaseException:
+     path = instance.alternate

if path:
path = 'path={path}'.format(path=path[0].text)
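Because `keyword_list()` now returns UTF-8 byte strings, the signal handler decodes them back to text before assigning them to the GeoServer resource. A rough sketch of that decode step (hypothetical values, Python 2 semantics, not the handler itself):

```python
# What instance.keyword_list() now yields: UTF-8 encoded byte strings.
keywords = ['\xe6\xb5\x99\xe6\xb1\x9f', 'population']

# Decode back to unicode, again with "replace", before handing the list to
# gs_resource.keywords; set() removes duplicates, so order may vary.
gs_keywords = [kw.decode("utf-8", "replace") for kw in set(keywords)]
print(gs_keywords)   # e.g. [u'\u6d59\u6c5f', u'population']
```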
11 changes: 8 additions & 3 deletions geonode/geoserver/upload.py
@@ -180,8 +180,13 @@ def geoserver_upload(
# Step 6. Make sure our data always has a valid projection
# FIXME: Put this in gsconfig.py
logger.info('>>> Step 6. Making sure [%s] has a valid projection' % name)
- if gs_resource.native_bbox is None:
-     box = gs_resource.native_bbox[:4]
+ _native_bbox = None
+ try:
+     _native_bbox = gs_resource.native_bbox
+ except BaseException:
+     pass
+ if _native_bbox and len(_native_bbox) >= 5 and _native_bbox[4:5][0] == 'EPSG:4326':
+     box = _native_bbox[:4]
minx, maxx, miny, maxy = [float(a) for a in box]
if -180 <= minx <= 180 and -180 <= maxx <= 180 and \
- 90 <= miny <= 90 and -90 <= maxy <= 90:
@@ -190,7 +195,7 @@
# If GeoServer couldn't figure out the projection, we just
# assume it's lat/lon to avoid a bad GeoServer configuration

- gs_resource.latlon_bbox = gs_resource.native_bbox
+ gs_resource.latlon_bbox = _native_bbox
gs_resource.projection = "EPSG:4326"
cat.save(gs_resource)
else:
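The previous guard tested `native_bbox` for `None` and then indexed it anyway; the reworked guard only proceeds when a bbox could be read and its fifth element names EPSG:4326. A worked example of the new condition, using a hypothetical bbox tuple in gsconfig's (minx, maxx, miny, maxy, crs) order:

```python
# Hypothetical native bbox for a layer near Zhejiang, already in lat/lon.
_native_bbox = ('118.0', '123.0', '27.0', '31.5', 'EPSG:4326')

if _native_bbox and len(_native_bbox) >= 5 and _native_bbox[4:5][0] == 'EPSG:4326':
    minx, maxx, miny, maxy = [float(a) for a in _native_bbox[:4]]
    # Values fall inside +/-180 and +/-90, so the layer would be kept as EPSG:4326.
    print(minx, maxx, miny, maxy)
```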
2 changes: 1 addition & 1 deletion geonode/geoserver/views.py
@@ -851,7 +851,7 @@ def get_capabilities(request, layerid=None, user=None,
'catalogue_url': settings.CATALOGUE['default']['URL'],
}
gc_str = tpl.render(ctx)
gc_str = gc_str.encode("utf-8")
gc_str = gc_str.encode("utf-8", "replace")
layerelem = etree.XML(gc_str)
rootdoc = etree.ElementTree(layerelem)
except Exception as e:
2 changes: 1 addition & 1 deletion geonode/layers/models.py
@@ -495,7 +495,7 @@ class Attribute(models.Model):

def __unicode__(self):
return "%s" % self.attribute_label.encode(
"utf-8") if self.attribute_label else self.attribute.encode("utf-8")
"utf-8", "replace") if self.attribute_label else self.attribute.encode("utf-8", "replace")

def unique_values_as_list(self):
return self.unique_values.split(',')
Binary file added geonode/tests/data/ming_female_1.zip
1 change: 1 addition & 0 deletions geonode/tests/data/ming_female_1/ming_female_1.cst
@@ -0,0 +1 @@
+ UTF-8
1 change: 1 addition & 0 deletions geonode/tests/data/ming_female_1/ming_female_1.prj
@@ -0,0 +1 @@
GEOGCS["Xian 1980", DATUM["Xian 1980", SPHEROID["IAG 1975", 6378140.0, 298.257, AUTHORITY["EPSG","7049"]], AUTHORITY["EPSG","6610"]], PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]], UNIT["degree", 0.017453292519943295], AXIS["Geodetic longitude", EAST], AXIS["Geodetic latitude", NORTH], AUTHORITY["EPSG","4610"]]
1 change: 1 addition & 0 deletions geonode/tests/data/ming_female_1/wfsrequest.txt
@@ -0,0 +1 @@
+ http://amap.zju.edu.cn:8080/geoserver/wfs?access_token=vKa0jUDKSIJVS8mxn3QbeXlNKbxFlO?format_options=charset%3AUTF-8&typename=geonode%3Aming_female_1&outputFormat=SHAPE-ZIP&version=1.0.0&service=WFS&request=GetFeature&access_token=vKa0jUDKSIJVS8mxn3QbeXlNKbxFlO
Binary file added geonode/tests/data/zhejiang_yangcan_yanyu.zip
1 change: 1 addition & 0 deletions geonode/tests/data/zhejiang_yangcan_yanyu/wfsrequest.txt
@@ -0,0 +1 @@
+ http://128.31.22.46:8080/geoserver/wfs?access_token=TCSt2nAzYCZeeSF9jTDULPff6YUWrI?outputFormat=SHAPE-ZIP&service=WFS&srs=EPSG%3A4610&request=GetFeature&format_options=charset%3AUTF-8&typename=geonode%3Azhejiang_yangcan_yanyu&version=1.0.0&access_token=TCSt2nAzYCZeeSF9jTDULPff6YUWrI
@@ -0,0 +1 @@
+ UTF-8
@@ -0,0 +1 @@
GEOGCS["Xian 1980", DATUM["Xian 1980", SPHEROID["IAG 1975", 6378140.0, 298.257, AUTHORITY["EPSG","7049"]], AUTHORITY["EPSG","6610"]], PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]], UNIT["degree", 0.017453292519943295], AXIS["Geodetic longitude", EAST], AXIS["Geodetic latitude", NORTH], AUTHORITY["EPSG","4610"]]
65 changes: 53 additions & 12 deletions geonode/tests/integration.py
@@ -512,13 +512,8 @@ def test_layer_upload_metadata(self):
# layer have projection file, but has no valid srid
self.assertEqual(
str(e),
"Invalid Layers. "
"Needs an authoritative SRID in its CRS to be accepted")
# except:
# # Sometimes failes with the message:
# # UploadError: Could not save the layer air_runways,
# # there was an upload error: Error occured unzipping file
# pass
"GeoServer failed to detect the projection for layer [air_runways]. "
"It doesn't look like EPSG:4326, so backing out the layer.")
finally:
# Clean up and completely delete the layer
if uploaded:
@@ -610,11 +605,57 @@ def test_layer_zip_upload_metadata(self):
uploaded.metadata_xml = thelayer_metadata
regions_resolved, regions_unresolved = resolve_regions(regions)
self.assertIsNotNone(regions_resolved)
- # except:
- # # Sometimes failes with the message:
- # # UploadError: Could not save the layer air_runways,
- # # there was an upload error: Error occured unzipping file
- # pass
finally:
# Clean up and completely delete the layer
if uploaded:
uploaded.delete()

+ @on_ogc_backend(geoserver.BACKEND_PACKAGE)
+ @timeout_decorator.timeout(LOCAL_TIMEOUT)
+ def test_layer_zip_upload_non_utf8(self):
+     """Test uploading a layer with non UTF-8 attributes names"""
+     uploaded = None
+     PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
+     thelayer_path = os.path.join(
+         PROJECT_ROOT,
+         'data/zhejiang_yangcan_yanyu')
+     thelayer_zip = os.path.join(
+         PROJECT_ROOT,
+         'data/',
+         'zhejiang_yangcan_yanyu.zip')
+     try:
+         if os.path.exists(thelayer_zip):
+             os.remove(thelayer_zip)
+         if os.path.exists(thelayer_path) and not os.path.exists(thelayer_zip):
+             zip_dir(thelayer_path, thelayer_zip)
+         if os.path.exists(thelayer_zip):
+             uploaded = file_upload(thelayer_zip, overwrite=True)
+             self.assertEquals(uploaded.title, 'Zhejiang Yangcan Yanyu')
+             self.assertEquals(len(uploaded.keyword_list()), 0)
+             self.assertEquals(uploaded.constraints_other, None)
+     finally:
+         # Clean up and completely delete the layer
+         if uploaded:
+             uploaded.delete()
+
+     uploaded = None
+     thelayer_path = os.path.join(
+         PROJECT_ROOT,
+         'data/ming_female_1')
+     thelayer_zip = os.path.join(
+         PROJECT_ROOT,
+         'data/',
+         'ming_female_1.zip')
+     try:
+         if os.path.exists(thelayer_zip):
+             os.remove(thelayer_zip)
+         if os.path.exists(thelayer_path) and not os.path.exists(thelayer_zip):
+             zip_dir(thelayer_path, thelayer_zip)
+         if os.path.exists(thelayer_zip):
+             uploaded = file_upload(thelayer_zip, overwrite=True)
+             self.assertEquals(uploaded.title, 'Ming Female 1')
+             self.assertEquals(len(uploaded.keyword_list()), 0)
+             self.assertEquals(uploaded.constraints_other, None)
+     finally:
+         # Clean up and completely delete the layer
+         if uploaded:
8 changes: 4 additions & 4 deletions geonode/upload/models.py
@@ -87,7 +87,7 @@ class Meta:

def get_session(self):
if self.session:
- return pickle.loads(str(self.session))
+ return pickle.loads(self.session.encode("utf-8", "replace"))

def update_from_session(self, upload_session):
self.state = upload_session.import_session.state
@@ -97,10 +97,10 @@ def update_from_session(self, upload_session):
self.session = None
else:
# Make sure we don't pickle UTF-8 chars
- upload_session.user.first_name = u'{}'.format(upload_session.user.first_name).encode('ascii', 'ignore')
- upload_session.user.last_name = u'{}'.format(upload_session.user.last_name).encode('ascii', 'ignore')
+ upload_session.user.first_name = u'{}'.format(upload_session.user.first_name).decode("utf-8", "replace")
+ upload_session.user.last_name = u'{}'.format(upload_session.user.last_name).decode("utf-8", "replace")
unicode_session = pickle.dumps(upload_session)
- self.session = unicode_session
+ self.session = unicode_session.decode("utf-8", "replace")
if self.upload_dir is None:
self.upload_dir = path.dirname(upload_session.base_file)
self.name = upload_session.layer_title or upload_session.name
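Storing the session now keeps everything as text: the pickle dump (protocol 0, Python 2's default, which is ASCII-safe) is decoded before it is written to the `session` field and re-encoded before `pickle.loads`. A minimal sketch of that round trip, with a hypothetical payload standing in for the real upload session object:

```python
import pickle

# Hypothetical stand-in for the upload session; the user's name is unicode.
payload = {'first_name': u'\u660e', 'layer_title': 'ming_female_1'}

dumped = pickle.dumps(payload, 0)                 # protocol 0 keeps the dump ASCII-safe
stored = dumped.decode("utf-8", "replace")        # text, suitable for a TextField
restored = pickle.loads(stored.encode("utf-8", "replace"))
assert restored['first_name'] == payload['first_name']
```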
25 changes: 14 additions & 11 deletions geonode/upload/utils.py
@@ -523,17 +523,20 @@ def _get_layer_values(layer, upload_session, expand=0):
lyr = inDataSource.GetLayer(str(layer.name))
limit = 100
for feat in islice(lyr, 0, limit):
- feat_values = json_loads_byteified(feat.ExportToJson()).get('properties')
- for k in feat_values.keys():
-     type_code = feat.GetFieldDefnRef(k).GetType()
-     binding = feat.GetFieldDefnRef(k).GetFieldTypeName(type_code)
-     feat_value = feat_values[k] if str(feat_values[k]) != 'None' else 0
-     if expand > 0:
-         ff = {'value': feat_value, 'binding': binding}
-         feat_values[k] = ff
-     else:
-         feat_values[k] = feat_value
- layer_values.append(feat_values)
+ try:
+     feat_values = json_loads_byteified(feat.ExportToJson()).get('properties')
+     for k in feat_values.keys():
+         type_code = feat.GetFieldDefnRef(k).GetType()
+         binding = feat.GetFieldDefnRef(k).GetFieldTypeName(type_code)
+         feat_value = feat_values[k] if str(feat_values[k]) != 'None' else 0
+         if expand > 0:
+             ff = {'value': feat_value, 'binding': binding}
+             feat_values[k] = ff
+         else:
+             feat_values[k] = feat_value
+     layer_values.append(feat_values)
+ except BaseException:
+     pass
return layer_values

