Skip to content

Commit

Permalink
Merge pull request #693 from IFRCGo/release/v1.2.0
Browse files Browse the repository at this point in the history
Release/v1.2.0
GergiH authored Apr 30, 2020
2 parents 5177e67 + 111f366 commit c016a11
Showing 33 changed files with 1,249 additions and 185 deletions.
2 changes: 1 addition & 1 deletion .flake8
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[flake8]
ignore = C901, W504
max-line-length = 120
max-line-length = 130
exclude = .git,__pycache__,old,build,dist
max-complexity = 10
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -33,3 +33,6 @@ appeals.json

# Personal / IDE / Editor stuff
.vscode

# media
project-imports/
18 changes: 17 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -6,6 +6,21 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

## Unreleased

## 1.2.0

### Added
- Fetch FTS HPC data using a Google Sheet.
- Add visibility support for project. (Public, Login required, IFRC Only)
- New Programme Type `Domestic`
- Add Bulk Project Import in Admin Panel.
- Enable history for Project changes.
- Add Sector/SectorTag `Health (private)` and `COVID-19`.
- Add API for Projects by region.
- Add multi-select filters for Project API enum fields.

### Changed
- Change Sector/SectorTag `Health` to `Health (public)`.

## 1.1.272

### Added
@@ -1123,7 +1138,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

## 0.1.20

[Unreleased]: https://github.com/IFRCGo/go-api/compare/1.1.272...HEAD
[Unreleased]: https://github.com/IFRCGo/go-api/compare/1.2.0...HEAD
[1.2.0]: https://github.com/IFRCGo/go-api/compare/1.1.272...1.2.0
[1.1.272]: https://github.com/IFRCGo/go-api/compare/1.1.271...1.1.272
[1.1.271]: https://github.com/IFRCGo/go-api/compare/1.1.269...1.1.271
[1.1.269]: https://github.com/IFRCGo/go-api/compare/1.1.268...1.1.269
16 changes: 16 additions & 0 deletions api/admin.py
Original file line number Diff line number Diff line change
@@ -486,9 +486,25 @@ class SituationReportTypeAdmin(CompareVersionAdmin):
search_fields = ('type',)

class CronJobAdmin(CompareVersionAdmin):
list_display = ('name', 'created_at', 'num_result', 'status')
search_fields = ('name', 'created_at',)
readonly_fields = ('created_at',)
list_filter = ('status', 'name')
readonly_fields = ('message_display',)

def message_display(self, obj):
style_class = {
models.CronJobStatus.WARNED: 'warning',
models.CronJobStatus.ERRONEOUS: 'error',
}.get(obj.status, 'success')
if obj.message:
return mark_safe(
f'''
<ul class="messagelist" style="margin-left: 0px;">
<li class="{style_class}"><pre>{obj.message}</pre></li>
</ul>
'''
)


class EmergencyOperationsDatasetAdmin(CompareVersionAdmin):
9 changes: 6 additions & 3 deletions api/drf_views.py
Original file line number Diff line number Diff line change
@@ -67,7 +67,7 @@
MiniDistrictSerializer,

SnippetSerializer,
MiniEventSerializer,
ListMiniEventSerializer,
ListEventSerializer,
ListEventDeploymentsSerializer,
DetailEventSerializer,
@@ -228,6 +228,9 @@ class EventFilter(filters.FilterSet):
countries__in = ListFilter(field_name='countries__id')
regions__in = ListFilter(field_name='regions__id')
id = filters.NumberFilter(field_name='id', lookup_expr='exact')
auto_generated_source = filters.ChoiceFilter(
label='Auto generated source choices', choices=[(v, v) for v in SOURCES.values()],
)
class Meta:
model = Event
fields = {
@@ -244,12 +247,12 @@ class EventViewset(viewsets.ReadOnlyModelViewSet):

def get_queryset(self):
if self.action == 'mini_events':
return Event.objects.filter(parent_event__isnull=True).values('id', 'name')
return Event.objects.filter(parent_event__isnull=True).prefetch_related('dtype')
return Event.objects.filter(parent_event__isnull=True)

def get_serializer_class(self):
if self.action == 'mini_events':
return MiniEventSerializer
return ListMiniEventSerializer
elif self.action == 'list':
return ListEventSerializer
else:
18 changes: 18 additions & 0 deletions api/migrations/0059_auto_20200430_0852.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Generated by Django 2.2.10 on 2020-04-30 08:52

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('api', '0058_auto_20200415_1606'),
]

operations = [
migrations.AlterField(
model_name='reversiondifferencelog',
name='object_name',
field=models.TextField(blank=True, null=True),
),
]
2 changes: 1 addition & 1 deletion api/models.py
Original file line number Diff line number Diff line change
@@ -1461,7 +1461,7 @@ class ReversionDifferenceLog(models.Model):
action = models.CharField(max_length=64) # Added, Changed, etc
username = models.CharField(max_length=256, null=True)
object_id = models.CharField(max_length=191, blank=True)
object_name = models.CharField(max_length=2000, null=True, blank=True) # the name of the record
object_name = models.TextField(null=True, blank=True) # the name of the record
object_type = models.CharField(max_length=50, blank=True) # Emergency, Appeal, etc
changed_from = ArrayField(
models.TextField(null=True, blank=True),
9 changes: 9 additions & 0 deletions api/serializers.py
Original file line number Diff line number Diff line change
@@ -181,6 +181,15 @@ class Meta:
model = Event
fields = ('name', 'dtype', 'id', 'slug', 'parent_event',)


class ListMiniEventSerializer(serializers.ModelSerializer):
dtype = DisasterTypeSerializer(read_only=True)

class Meta:
model = Event
fields = ('id', 'name', 'slug', 'dtype', 'auto_generated_source')


class ListEventSerializer(serializers.ModelSerializer):
appeals = RelatedAppealSerializer(many=True, read_only=True)
countries = MiniCountrySerializer(many=True)
11 changes: 11 additions & 0 deletions api/templates/admin/change_list.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{% extends "admin/change_list.html" %}

{% block object-tools-items %}

{% if additional_addlinks %}
{% for addlink in additional_addlinks %}
<li><a href="{% url addlink.namespace %}">{{addlink.label}}</a></li>
{% endfor %}
{% endif %}
{{ block.super }}
{% endblock %}
36 changes: 36 additions & 0 deletions api/templates/admin/import_form.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
{% extends 'admin/change_form.html' %}
{% load i18n admin_urls static admin_modify %}

{% block extrahead %}
{{ block.super }}
{{ form.media }}
<script type="text/javascript" src="{% url 'admin:jsi18n' %}"></script>
<script type="text/javascript" src="{% static 'admin/js/core.js' %}"></script>
{% endblock %}

{% block breadcrumbs %}
<div class="breadcrumbs">
<a href="{% url 'admin:index' %}">{% trans 'Home' %}</a>
&rsaquo; <a href="{% url 'admin:app_list' app_label=opts.app_label %}">{{ opts.app_config.verbose_name }}</a>
&rsaquo; {% if has_view_permission %}<a href="{% url opts|admin_urlname:'changelist' %}">{{ opts.verbose_name_plural|capfirst }}</a> {% else %}{{ opts.verbose_name_plural|capfirst }}{% endif %}
&rsaquo; Import
</div>
{% endblock %}

{% block content_title %}
<h1>IMPORT <b>{{opts.verbose_name | upper}}</b></h1>
{% endblock %}

{% block content %}
<form method="post" {% if form.is_multipart %}enctype="multipart/form-data"{% endif %} >
{% csrf_token %}
<div>
{% for fieldset in adminform %}
{% include "admin/includes/fieldset.html" %}
{% endfor %}
</div>
<div class="submit-row">
<input type="submit" value="Submit" class="default"/>
</div>
</form>
{% endblock %}
23 changes: 19 additions & 4 deletions databank/management/commands/ingest_databank.py
Original file line number Diff line number Diff line change
@@ -43,7 +43,18 @@ def load(self):
if hasattr(source, 'prefetch'):
start = datetime.datetime.now()
print(f'\t -> {name}', end='')
source_prefetch_data[source.__name__] = source.prefetch()
prefetch_response = source.prefetch()
if prefetch_response is not None:
source_prefetch_data[source.__name__], item_count, sources = prefetch_response
# Log success prefetch
CronJob.sync_cron({
'name': name,
'message': f'Done querying {name}' + (
sources and f' using sources: {sources}'
) or '',
'num_result': item_count,
'status': CronJobStatus.SUCCESSFUL,
})
print(f' [{datetime.datetime.now() - start}]')

# Load
@@ -73,9 +84,13 @@ def load(self):
print(f' [{datetime.datetime.now() - start}]')
overview.save()
index += 1
if name == 'FTS_HPC': # This source can not be checked/logged via prefetch, that is why we do it here, after the "load".
body = { "name": name, "message": "Done querying " + name + " data feeds", "num_result": index, "status": CronJobStatus.SUCCESSFUL }
CronJob.sync_cron(body)
# This source can not be checked/logged via prefetch, that is why we do it here, after the "load".
if name == 'FTS_HPC':
CronJob.sync_cron({
'name': name,
'message': f'Done querying {name} data feeds',
'num_result': index, "status": CronJobStatus.SUCCESSFUL,
})

def handle(self, *args, **kwargs):
start = datetime.datetime.now()
12 changes: 2 additions & 10 deletions databank/management/commands/sources/FDRS.py
Original file line number Diff line number Diff line change
@@ -3,7 +3,6 @@
from django.conf import settings

from api.utils import base64_encode
from api.models import CronJob, CronJobStatus
from databank.models import CountryOverview as CO
from .utils import catch_error

@@ -44,11 +43,7 @@
@catch_error('Error occured while fetching from FDRS API, Please make sure valid FDRS_CREDENTIAL is provided')
def prefetch():
fdrs_entities = requests.get(FDRS_NS_API_ENDPOINT, headers=FDRS_HEADERS)

if fdrs_entities.status_code != 200:
body = { "name": "FDRS", "message": "Error querying FDRS NS API feed at " + FDRS_NS_API_ENDPOINT, "status": CronJobStatus.ERRONEOUS } # not every case is catched here, e.g. if the base URL is wrong...
CronJob.sync_cron(body)
return {}
fdrs_entities.raise_for_status()
fdrs_entities = fdrs_entities.json()

ns_iso_map = {
@@ -57,9 +52,6 @@ def prefetch():
for ns in fdrs_entities
}

body = { "name": "FDRS", "message": "Done querying FDRS NS API feed at " + FDRS_NS_API_ENDPOINT, "num_result": len(ns_iso_map), "status": CronJobStatus.SUCCESSFUL }
CronJob.sync_cron(body)

return {
# KEY <ISO2>-<Indicator_ID>: {year: '', value: ''}
f"{ns_iso_map[ns_data['id']].upper()}-{indicator_data['id']}": (
@@ -69,7 +61,7 @@ def prefetch():
)
for indicator_data in requests.get(FDRS_DATA_API_ENDPOINT, headers=FDRS_HEADERS).json()['data']
for ns_data in indicator_data['data']
}
}, len(ns_iso_map), FDRS_DATA_API_ENDPOINT


@catch_error()
44 changes: 31 additions & 13 deletions databank/management/commands/sources/FTS_HPC.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
import csv
import io
import datetime
import requests

# from django.conf import settings
# from api.utils import base64_encode

from .utils import catch_error, get_country_by_iso2
from api.models import CronJob, CronJobStatus


FTS_URL = 'https://api.hpc.tools/v1/public/fts/flow?countryISO3={0}&groupby=year&report=3'
EMERGENCY_URL = 'https://api.hpc.tools/v1/public/emergency/country/{0}'
GOOGLE_SHEET_URL = 'https://docs.google.com/spreadsheets/d/1MArQSVdbLXLaQ8ixUKo9jIjifTCVDDxTJYbGoRuw3Vw/gviz/tq?tqx=out:csv'

HEADERS = {
    # TODO: Use credentials here
@@ -18,28 +20,43 @@


@catch_error()
def load(country, overview, _):
def prefetch():
g_sheet_data = requests.get(GOOGLE_SHEET_URL, headers=HEADERS)
g_sheet_data.raise_for_status()

g_sheet_data = list(csv.DictReader(io.StringIO(g_sheet_data.text)))

gho_data = {
f"{d['Country #country+code'].upper()}-{d['Year #date+year']}": {
'people_in_need': d['PIN #inneed'],
'people_targeted': d['PT #targeted'],
'funding_total_usd': d['Funding #value+funding+total+usd'],
'funding_required_usd': d['Requirements #value+funding+required+usd'],
}
for d in g_sheet_data
}

return gho_data, len(gho_data), GOOGLE_SHEET_URL


@catch_error()
def load(country, overview, gho_data):
pcountry = get_country_by_iso2(country.iso)
if pcountry is None:
return
fts_data = requests.get(FTS_URL.format(pcountry.alpha_3), headers=HEADERS)
emg_data = requests.get(EMERGENCY_URL.format(pcountry.alpha_3), headers=HEADERS)

if fts_data.status_code != 200:
body = { "name": "FTS_HPC", "message": "Error querying HPC fts data feed at " + FTS_URL, "status": CronJobStatus.ERRONEOUS } # not every case is catched here, e.g. if the base URL is wrong...
CronJob.sync_cron(body)
return {}
if emg_data.status_code != 200:
body = { "name": "FTS_HPC", "message": "Error querying HPC emergency data feed at " + EMERGENCY_URL, "status": CronJobStatus.ERRONEOUS } # not every case is catched here, e.g. if the base URL is wrong...
CronJob.sync_cron(body)
return {}
fts_data.raise_for_status()
emg_data.raise_for_status()

fts_data = fts_data.json()
emg_data = emg_data.json()

c_data = {}

# fundingTotals, pledgeTotals
for fund_area in ['fundingTotals', 'pledgeTotals']:
for fund_area, fund_area_s in [('fundingTotals', 'funding_totals'), ('pledgeTotals', 'pledge_totals')]:
fund_area_data = fts_data['data']['report3'][fund_area]['objects']
if len(fund_area_data) > 0:
for v in fund_area_data[0]['objectsBreakdown']:
@@ -49,9 +66,9 @@ def load(country, overview, _):
except ValueError:
continue
if year not in c_data:
c_data[year] = {fund_area: totalFunding}
c_data[year] = {fund_area_s: totalFunding}
else:
c_data[year][fund_area] = totalFunding
c_data[year][fund_area_s] = totalFunding

# numActivations
CronJobSum = 0
@@ -73,6 +90,7 @@ def load(country, overview, _):
{
'year': year,
**values,
**gho_data.get(f"{pcountry.alpha_3.upper()}-{year}", {}),
}
for year, values in c_data.items()
]
10 changes: 2 additions & 8 deletions databank/management/commands/sources/INFORM.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import requests

from databank.models import InformIndicator
from api.models import District, CronJob, CronJobStatus

from .utils import catch_error, get_country_by_iso3

@@ -17,10 +16,7 @@
def prefetch():
inform_data = {}
response_d = requests.get(INFORM_API_ENDPOINT)
if response_d.status_code != 200: # Because it is too often, it is set to WARNED, but should be ERRONEOUS:
body = { "name": "INFORM", "message": "Error querying Inform feed at " + INFORM_API_ENDPOINT, "status": CronJobStatus.WARNED } # not every case is catched here, e.g. if the base URL is wrong...
CronJob.sync_cron(body)
return inform_data
response_d.raise_for_status()
response_d = response_d.json()

for index, i_data in enumerate(response_d):
@@ -47,9 +43,7 @@ def prefetch():
else:
inform_data[pcountry.alpha_2].append(entry)

body = { "name": "INFORM", "message": "Done querying Inform feed at " + INFORM_API_ENDPOINT, "num_result": len(inform_data), "status": CronJobStatus.SUCCESSFUL }
CronJob.sync_cron(body)
return inform_data
return inform_data, len(inform_data), INFORM_API_ENDPOINT


@catch_error()
Loading

0 comments on commit c016a11

Please sign in to comment.