{% blocktrans %}A dataset can support one or two time attributes. If a single
+ attribute is used, the dataset is considered to contain data that is valid at single points in time. If two
+ attributes are used, the second attribute represents the end of a valid period, hence the dataset is considered
+ to contain data that is valid at certain periods in time.{% endblocktrans %}
+
{% trans "Selecting an Attribute" %}
+
{% trans "A time attribute can be" %}:
+
+
{% trans "An existing date" %}
+
{% trans "Text that can be converted to a timestamp" %}
+
{% trans "A number representing a year" %}
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/geonode/layers/tests.py b/geonode/layers/tests.py
index 1d3b87406fc..293e9f662cb 100644
--- a/geonode/layers/tests.py
+++ b/geonode/layers/tests.py
@@ -60,7 +60,7 @@
from geonode.resource.manager import resource_manager
from geonode.tests.utils import NotificationsTestsHelper
from geonode.layers.models import Dataset, Style, Attribute
-from geonode.layers.forms import DatasetForm, JSONField, LayerUploadForm
+from geonode.layers.forms import DatasetForm, DatasetTimeSerieForm, JSONField, LayerUploadForm
from geonode.layers.populate_datasets_data import create_dataset_data
from geonode.base.models import TopicCategory, License, Region, Link
from geonode.utils import check_ogc_backend, set_resource_default_links
@@ -1861,6 +1861,7 @@ def setUp(self) -> None:
self.user = get_user_model().objects.get(username='admin')
self.dataset = create_single_dataset("my_single_layer", owner=self.user)
self.sut = DatasetForm
+ self.time_form = DatasetTimeSerieForm
def test_resource_form_is_invalid_extra_metadata_not_json_format(self):
self.client.login(username="admin", password="admin")
@@ -1915,3 +1916,48 @@ def test_resource_form_is_valid_extra_metadata(self):
"extra_metadata": '[{"id": 1, "filter_header": "object", "field_name": "object", "field_label": "object", "field_value": "object"}]'
})
self.assertTrue(form.is_valid())
+
+ def test_dataset_time_form_should_work(self):
+
+ attr, _ = Attribute.objects.get_or_create(
+ dataset=self.dataset,
+ attribute="field_date",
+ attribute_type="xsd:dateTime"
+ )
+ self.dataset.attribute_set.add(attr)
+ self.dataset.save()
+ form = self.time_form(
+ instance=self.dataset,
+ data={
+ 'attribute': self.dataset.attributes.first().id,
+ 'end_attribute': '',
+ 'presentation': 'DISCRETE_INTERVAL',
+ 'precision_value': 12345,
+ 'precision_step': 'seconds'
+ }
+ )
+ self.assertTrue(form.is_valid())
+ self.assertDictEqual({}, form.errors)
+
+ def test_dataset_time_form_should_raise_error_if_invalid_payload(self):
+
+ attr, _ = Attribute.objects.get_or_create(
+ dataset=self.dataset,
+ attribute="field_date",
+ attribute_type="xsd:dateTime"
+ )
+ self.dataset.attribute_set.add(attr)
+ self.dataset.save()
+ form = self.time_form(
+ instance=self.dataset,
+ data={
+ 'attribute': self.dataset.attributes.first().id,
+ 'end_attribute': '',
+ 'presentation': 'INVALID_PRESENTATION_VALUE',
+ 'precision_value': 12345,
+ 'precision_step': 'seconds'
+ }
+ )
+ self.assertFalse(form.is_valid())
+ self.assertTrue('presentation' in form.errors)
+ self.assertEqual("Select a valid choice. INVALID_PRESENTATION_VALUE is not one of the available choices.", form.errors['presentation'][0])
diff --git a/geonode/layers/views.py b/geonode/layers/views.py
index 7e749bb97ce..96d0d67fddb 100644
--- a/geonode/layers/views.py
+++ b/geonode/layers/views.py
@@ -65,6 +65,7 @@
from geonode.decorators import check_keyword_write_perms
from geonode.layers.forms import (
DatasetForm,
+ DatasetTimeSerieForm,
LayerAttributeForm,
NewLayerUploadForm)
from geonode.layers.models import (
@@ -485,6 +486,7 @@ def dataset_metadata(
thumbnail_url = layer.thumbnail_url
dataset_form = DatasetForm(request.POST, instance=layer, prefix="resource", user=request.user)
+
if not dataset_form.is_valid():
logger.error(f"Dataset Metadata form is not valid: {dataset_form.errors}")
out = {
@@ -542,6 +544,18 @@ def dataset_metadata(
json.dumps(out),
content_type='application/json',
status=400)
+
+ timeseries_form = DatasetTimeSerieForm(request.POST, instance=layer, prefix='timeseries')
+ if not timeseries_form.is_valid():
+ out = {
+ 'success': False,
+ 'errors': [f"{x}: {y[0].messages[0]}" for x, y in timeseries_form.errors.as_data().items()]
+ }
+ logger.error(f"{out.get('errors')}")
+ return HttpResponse(
+ json.dumps(out),
+ content_type='application/json',
+ status=400)
else:
dataset_form = DatasetForm(instance=layer, prefix="resource", user=request.user)
dataset_form.disable_keywords_widget_for_non_superuser(request.user)
@@ -553,6 +567,37 @@ def dataset_metadata(
prefix="category_choice_field",
initial=topic_category.id if topic_category else None)
+ gs_layer = gs_catalog.get_layer(name=layer.name)
+ initial = {}
+ if gs_layer is not None and layer.has_time:
+ gs_time_info = gs_layer.resource.metadata.get("time")
+ if gs_time_info.enabled:
+ _attr = layer.attributes.filter(attribute=gs_time_info.attribute).first()
+ initial["attribute"] = _attr.pk if _attr else None
+ if gs_time_info.end_attribute is not None:
+ end_attr = layer.attributes.filter(attribute=gs_time_info.end_attribute).first()
+ initial["end_attribute"] = end_attr.pk if end_attr else None
+ initial["presentation"] = gs_time_info.presentation
+ lookup_value = sorted(list(gs_time_info._lookup), key=lambda x: x[1], reverse=True)
+ if gs_time_info.resolution is not None:
+ res = gs_time_info.resolution // 1000
+ for el in lookup_value:
+ if res % el[1] == 0:
+ initial["precision_value"] = res // el[1]
+ initial["precision_step"] = el[0]
+ break
+ else:
+ initial["precision_value"] = gs_time_info.resolution
+ initial["precision_step"] = "seconds"
+
+ timeseries_form = DatasetTimeSerieForm(
+ instance=layer,
+ prefix="timeseries",
+ initial=initial
+ )
+ timeseries_form.fields.get('attribute').queryset = layer.attributes.filter(attribute_type__in=['xsd:dateTime'])
+ timeseries_form.fields.get('end_attribute').queryset = layer.attributes.filter(attribute_type__in=['xsd:dateTime'])
+
# Create THESAURUS widgets
lang = settings.THESAURUS_DEFAULT_LANG if hasattr(settings, 'THESAURUS_DEFAULT_LANG') else 'en'
if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
@@ -587,7 +632,7 @@ def dataset_metadata(
tkeywords_form.fields[tid].initial = values
if request.method == "POST" and dataset_form.is_valid() and attribute_form.is_valid(
- ) and category_form.is_valid() and tkeywords_form.is_valid():
+ ) and category_form.is_valid() and tkeywords_form.is_valid() and timeseries_form.is_valid():
new_poc = dataset_form.cleaned_data['poc']
new_author = dataset_form.cleaned_data['metadata_author']
@@ -696,6 +741,27 @@ def dataset_metadata(
if any([x in dataset_form.changed_data for x in ['is_approved', 'is_published']]):
vals['is_approved'] = dataset_form.cleaned_data.get('is_approved', layer.is_approved)
vals['is_published'] = dataset_form.cleaned_data.get('is_published', layer.is_published)
+
+ layer.has_time = dataset_form.cleaned_data.get('has_time', layer.has_time)
+
+ if timeseries_form.cleaned_data and ('has_time' in dataset_form.changed_data or timeseries_form.changed_data):
+ ts = timeseries_form.cleaned_data
+ end_attr = layer.attributes.get(pk=ts.get("end_attribute")).attribute if ts.get("end_attribute") else None
+ start_attr = layer.attributes.get(pk=ts.get("attribute")).attribute if ts.get("attribute") else None
+ resource_manager.exec(
+ 'set_time_info',
+ None,
+ instance=layer,
+ time_info={
+ "attribute": start_attr,
+ "end_attribute": end_attr,
+ "presentation": ts.get('presentation', None),
+ "precision_value": ts.get('precision_value', None),
+ "precision_step": ts.get('precision_step', None),
+ "enabled": dataset_form.cleaned_data.get('has_time', False)
+ }
+ )
+
resource_manager.update(
layer.uuid,
instance=layer,
@@ -703,6 +769,7 @@ def dataset_metadata(
vals=vals,
extra_metadata=json.loads(dataset_form.cleaned_data['extra_metadata'])
)
+
return HttpResponse(json.dumps({'message': message}))
if not AdvancedSecurityWorkflowManager.is_allowed_to_publish(request.user, layer):
@@ -736,6 +803,7 @@ def dataset_metadata(
"poc_form": poc_form,
"author_form": author_form,
"attribute_form": attribute_form,
+ "timeseries_form": timeseries_form,
"category_form": category_form,
"tkeywords_form": tkeywords_form,
"preview": getattr(settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'mapstore'),
diff --git a/geonode/security/models.py b/geonode/security/models.py
index 81a3c10e724..d4e7f0b9aa8 100644
--- a/geonode/security/models.py
+++ b/geonode/security/models.py
@@ -351,7 +351,7 @@ def get_user_perms(self, user):
PERMISSIONS_TO_FETCH = VIEW_PERMISSIONS + DOWNLOAD_PERMISSIONS + ADMIN_PERMISSIONS + SERVICE_PERMISSIONS
# include explicit permissions appliable to "subtype == 'vector'"
- if self.subtype == 'vector':
+ if self.subtype in ['vector', 'vector_time']:
PERMISSIONS_TO_FETCH += DATASET_ADMIN_PERMISSIONS
elif self.subtype == 'raster':
PERMISSIONS_TO_FETCH += DATASET_EDIT_STYLE_PERMISSIONS
diff --git a/geonode/templates/metadata_form_js.html b/geonode/templates/metadata_form_js.html
index fefc2ebf43e..f3408aa23cb 100644
--- a/geonode/templates/metadata_form_js.html
+++ b/geonode/templates/metadata_form_js.html
@@ -415,15 +415,42 @@
{% endblock onInputChange %}
$(document).ready(function() {
+
+ if($("#id_resource-has_time").is(":checked")) {
+ $("#settings_time_series").show();
+ if ($("#id_timeseries-presentation").val() == 'LIST') {
+ $("#precision_value").hide();
+ }
+ } else {
+ $("#settings_time_series").hide();
+ };
+
+ $("#id_resource-has_time").click(function() {
+ if($(this).is(":checked")) {
+ $("#settings_time_series").show();
+ if ($("#id_timeseries-presentation").val() == 'LIST') {
+ $("#precision_value").hide();
+ }
+ } else {
+ $("#settings_time_series").hide();
+ }
+ });
+
+ $("#id_timeseries-presentation").change(function() {
+ if(this.value != 'LIST') {
+ $("#precision_value").show();
+ } else {
+ $("#precision_value").hide();
+ }
+ });
+
{% block hints %}
$('#completeness-hints .mandatory-hint').click(getTab.bind(null, 1));
$('#completeness-hints .advanced-hint').click(getTab.bind(null, 2));
{% endblock hints %}
-
-
-
+
{% block trigger_onInputChange %}
{{UI_REQUIRED_FIELDS}}.forEach(element => $('#' + element).change(onInputChange).change());
diff --git a/geonode/upload/upload.py b/geonode/upload/upload.py
index 4caf90f631f..3e40bda28bd 100644
--- a/geonode/upload/upload.py
+++ b/geonode/upload/upload.py
@@ -641,6 +641,9 @@ def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
# @todo see above in save_step, regarding computed unique name
name = task.layer.name
target = task.target
+ has_time = False
+ if upload_session.time and upload_session.time_info and upload_session.time_transforms:
+ has_time = True
_vals = dict(
title=upload_session.dataset_title,
@@ -649,7 +652,8 @@ def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
store=target.name,
name=task.layer.name,
workspace=target.workspace_name,
- subtype=get_dataset_storetype(target.store_type))
+ subtype=get_dataset_storetype(target.store_type) if not has_time else get_dataset_storetype('vectorTimeSeries')
+ )
if saved_dataset:
name = saved_dataset.get_real_instance().name
@@ -920,12 +924,13 @@ def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
with transaction.atomic():
resource_manager.set_permissions(
None, instance=saved_dataset, permissions=permissions, created=created)
+ resource_manager.exec(
+ 'set_time_info', None, instance=saved_dataset, time_info=upload_session.time_info)
+ saved_dataset.refresh_from_db()
resource_manager.update(
None, instance=saved_dataset, xml_file=xml_file, metadata_uploaded=metadata_uploaded)
resource_manager.exec(
'set_style', None, instance=saved_dataset, sld_uploaded=sld_uploaded, sld_file=sld_file, tempdir=upload_session.tempdir)
- resource_manager.exec(
- 'set_time_info', None, instance=saved_dataset, time_info=upload_session.time_info)
resource_manager.set_thumbnail(
None, instance=saved_dataset)