Implement combined modified Gaussian function #1927

Merged
13 changes: 1 addition & 12 deletions .gitignore
@@ -20,18 +20,7 @@ __pycache__
!/app/api/src/data/.gitkeep
r5-cache

/app/api/src/cache/traveltime_matrices/walking/*
/app/api/src/cache/traveltime_matrices/cycling/*
/app/api/src/cache/traveltime_matrices/public_transport/*
!/app/api/src/cache/traveltime_matrices/walking/standard/.gitkeep
!/app/api/src/cache/traveltime_matrices/cycling/.gitkeep
!/app/api/src/cache/traveltime_matrices/public_transport/.gitkeep
/app/api/src/cache/opportunity_matrices/walking/*
/app/api/src/cache/opportunity_matrices/cycling/*
/app/api/src/cache/opportunity_matrices/public_transport/*
!/app/api/src/cache/opportunity_matrices/walking/standard/.gitkeep
!/app/api/src/cache/opportunity_matrices/cycling/.gitkeep
!/app/api/src/cache/opportunity_matrices/public_transport/.gitkeep
/app/api/src/cache/

# Unit test / coverage reports
.coverage
1 change: 0 additions & 1 deletion app/api/.vscode/launch.json
@@ -37,7 +37,6 @@
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"stopOnEntry": true,
"justMyCode": false
},
{
87 changes: 53 additions & 34 deletions app/api/src/core/heatmap.py
@@ -65,15 +65,14 @@ def medians(sorted_table, unique):
if not sorted_table.size:
return None
travel_times = sorted_table.transpose()[1]
unique_index = unique[1]
medians = np.empty(unique_index.shape[0], np.float32)
# Add the last index to the unique index:
unique_index = np.append(unique[1], sorted_table.shape[0])
medians = np.empty(unique[1].shape[0], np.float32)
for i in range(unique_index.shape[0] - 1):
j = i + 1
travel_time = travel_times[unique_index[i] : unique_index[j]]
medians[i] = np.median(travel_time)
else:
travel_time = travel_times[unique_index[i + 1] :]
medians[i + 1] = np.median(travel_time)

return medians
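
The pattern above (and in mins, counts and averages below) drops the old for/else tail by appending the table length to the unique start indices, so the last grid's group is sliced like every other one. A minimal runnable sketch of that grouping idea, assuming `unique` is the tuple returned by np.unique(..., return_index=True) on the sorted grid-id column (an assumption, not shown in this diff):

import numpy as np

# Hypothetical sorted_table: column 0 = grid id (already sorted), column 1 = travel time.
sorted_table = np.array(
    [[10, 5.0], [10, 7.0], [12, 3.0], [12, 9.0], [12, 4.0], [15, 6.0]],
    dtype=np.float32,
)
travel_times = sorted_table.transpose()[1]

# Assumed shape of `unique`: unique[1] holds the first-row index of every grid id.
unique = np.unique(sorted_table[:, 0], return_index=True)

# Append the total row count so the last grid gets a regular [start:end) slice.
unique_index = np.append(unique[1], sorted_table.shape[0])

medians = np.empty(unique[1].shape[0], np.float32)
for i in range(unique_index.shape[0] - 1):
    group = travel_times[unique_index[i] : unique_index[i + 1]]
    medians[i] = np.median(group)

print(medians)  # [6. 4. 6.] -- one median per grid id, last group included

With the sentinel index appended, unique_index[i] : unique_index[i + 1] always covers a complete group, which keeps the @njit loops free of the old for/else special case.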


@@ -95,14 +94,13 @@ def mins(sorted_table, unique):
if not sorted_table.size:
return None
travel_times = sorted_table.transpose()[1]
unique_index = unique[1]
mins = np.empty(unique_index.shape[0], np.float32)
# Add the last index to the unique index:
unique_index = np.append(unique[1], sorted_table.shape[0])
mins = np.empty(unique[1].shape[0], np.float32)
for i in range(unique_index.shape[0] - 1):
travel_time = travel_times[unique_index[i] : unique_index[i + 1]]
mins[i] = np.min(travel_time)
else:
travel_time = travel_times[unique_index[i + 1] :]
mins[i + 1] = np.min(travel_time)

return mins


@@ -123,14 +121,13 @@ def counts(sorted_table, unique):
if not sorted_table.size:
return None
travel_times = sorted_table.transpose()[1]
unique_index = unique[1]
counts = np.empty(unique_index.shape[0], np.float32)
# Add the last index to the unique index:
unique_index = np.append(unique[1], sorted_table.shape[0])
counts = np.empty(unique[1].shape[0], np.float32)
for i in range(unique_index.shape[0] - 1):
travel_time = travel_times[unique_index[i] : unique_index[i + 1]]
counts[i] = travel_time.shape[0]
else:
travel_time = travel_times[unique_index[i + 1] :]
counts[i + 1] = travel_time.shape[0]

return counts


@@ -151,47 +148,69 @@ def averages(sorted_table, unique):
if not sorted_table.size:
return None
travel_times = sorted_table.transpose()[1]
unique_index = unique[1]
averages = np.empty(unique_index.shape[0], np.float32)
# Add the last index to the unique index:
unique_index = np.append(unique[1], sorted_table.shape[0])

averages = np.empty(unique[1].shape[0], np.float32)
for i in range(unique_index.shape[0] - 1):
travel_time = travel_times[unique_index[i] : unique_index[i + 1]]
averages[i] = np.average(travel_time)
else:
travel_time = travel_times[unique_index[i + 1] :]
averages[i + 1] = np.average(travel_time)

return averages


@njit
def modified_gaussian_per_grid(sorted_table, unique, sensitivity, cutoff):
def combined_modified_gaussian_per_grid(
sorted_table, unique, sensitivity, cutoff, static_traveltime
):
if not sorted_table.size:
return None
sensitivity_ = sensitivity / (60 * 60) # convert sensitivity to minutes
travel_times = sorted_table.transpose()[1]
unique_index = unique[1]
modified_gaussian_per_grids = np.empty(unique_index.shape[0], np.float64)
# Add the last index to the unique index:
unique_index = np.append(unique[1], sorted_table.shape[0])
combined_modified_gaussian_per_grids = np.empty(unique[1].shape[0], np.float64)
for i in range(unique_index.shape[0] - 1):
travel_time = travel_times[unique_index[i] : unique_index[i + 1]]
sum = 0
for t in travel_time:
f = exp(-t * t / sensitivity)
if t > cutoff:
# Assume result is 0
continue
if t <= static_traveltime:
f = 1
else:
t = t - static_traveltime
f = exp(-t * t / sensitivity_)
sum += f
if sum >= cutoff:
modified_gaussian_per_grids[i] = 0
break
else:
modified_gaussian_per_grids[i] = sum
combined_modified_gaussian_per_grids[i] = sum

else:
travel_time = travel_times[unique_index[i + 1] :]
return combined_modified_gaussian_per_grids


@njit
def modified_gaussian_per_grid(sorted_table, unique, sensitivity, cutoff):
if not sorted_table.size:
return None

sensitivity_ = sensitivity / (60 * 60) # convert sensitivity to minutes
travel_times = sorted_table.transpose()[1]
# Add the last index to the unique index:
unique_index = np.append(unique[1], sorted_table.shape[0])
modified_gaussian_per_grids = np.empty(unique[1].shape[0], np.float64)
for i in range(unique_index.shape[0] - 1):
travel_time = travel_times[unique_index[i] : unique_index[i + 1]]
sum = 0
for t in travel_time:
f = exp(-t * t / sensitivity)
if t > cutoff:
# Assume result is 0
continue
f = exp(-t * t / sensitivity_)
sum += f
if sum >= cutoff:
modified_gaussian_per_grids[i] = 0
break
else:
modified_gaussian_per_grids[i] = sum

return modified_gaussian_per_grids
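
Taken together, combined_modified_gaussian_per_grid scores each grid cell by summing a per-opportunity weight that is 1 up to static_traveltime, decays as exp(-(t - static_traveltime)^2 / (sensitivity / 3600)) after that, and drops to 0 beyond the cutoff (max_traveltime). A standalone sketch of that weight function, assuming travel times are in minutes as the in-code comment suggests:

from math import exp

def combined_modified_gaussian_weight(t, sensitivity, cutoff, static_traveltime):
    # Sketch of the per-opportunity weight inside combined_modified_gaussian_per_grid.
    sensitivity_ = sensitivity / (60 * 60)  # same rescaling as in the @njit kernel
    if t > cutoff:
        return 0.0  # beyond max_traveltime: no contribution
    if t <= static_traveltime:
        return 1.0  # within the static travel time: full weight
    t = t - static_traveltime
    return exp(-t * t / sensitivity_)

# Values from the combined request example further below
# (sensitivity=250000, max_traveltime=20, static_traveltime=5):
for minutes in (3, 5, 10, 20, 25):
    print(minutes, round(combined_modified_gaussian_weight(minutes, 250_000, 20, 5), 3))
# -> 1.0, 1.0, ~0.698, ~0.039, 0.0

The plain modified_gaussian_per_grid variant is the same without the plateau: it applies exp(-t^2 / (sensitivity / 3600)) to every travel time up to the cutoff.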


18 changes: 14 additions & 4 deletions app/api/src/crud/crud_read_heatmap.py
@@ -474,20 +474,30 @@ def do_calculations(self, sorted_table: dict, uniques: dict, heatmap_settings: d
"""

method_map = {
"gravity": "modified_gaussian_per_grid",
"modified_gaussian": "modified_gaussian_per_grid",
"combined_cumulative_modified_gaussian": "combined_modified_gaussian_per_grid",
"connectivity": "connectivity",
"cumulative": "counts",
"closest_average": "mins",
}
output = {}
if heatmap_settings.heatmap_type.value == "gravity":
if heatmap_settings.heatmap_type.value == "modified_gaussian":
for key, heatmap_config in heatmap_settings.heatmap_config.items():
output[key] = heatmap_core.modified_gaussian_per_grid(
sorted_table[key],
uniques[key],
heatmap_config["sensitivity"],
heatmap_config["max_traveltime"],
)
elif heatmap_settings.heatmap_type.value == "combined_cumulative_modified_gaussian":
for key, heatmap_config in heatmap_settings.heatmap_config.items():
output[key] = heatmap_core.combined_modified_gaussian_per_grid(
sorted_table[key],
uniques[key],
heatmap_config["sensitivity"],
heatmap_config["max_traveltime"],
heatmap_config["static_traveltime"],
)
else:
method_name = method_map[heatmap_settings.heatmap_type.value]
method = getattr(heatmap_core, method_name)
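
The two Gaussian variants are branched on explicitly because they take extra per-category parameters, while the remaining heatmap types are resolved dynamically through method_map and getattr. A trimmed-down, runnable sketch of just that routing (the real heatmap_core calls are replaced by descriptive strings, so this is an illustration, not the actual implementation):

method_map = {
    "modified_gaussian": "modified_gaussian_per_grid",
    "combined_cumulative_modified_gaussian": "combined_modified_gaussian_per_grid",
    "connectivity": "connectivity",
    "cumulative": "counts",
    "closest_average": "mins",
}

def route(heatmap_type: str) -> str:
    if heatmap_type == "modified_gaussian":
        # explicit branch: passes sensitivity and max_traveltime per POI category
        return "modified_gaussian_per_grid(sensitivity, max_traveltime)"
    if heatmap_type == "combined_cumulative_modified_gaussian":
        # explicit branch: additionally passes static_traveltime
        return "combined_modified_gaussian_per_grid(sensitivity, max_traveltime, static_traveltime)"
    # everything else is resolved via method_map + getattr on heatmap_core
    return method_map[heatmap_type] + "(...)"

for t in ("modified_gaussian", "combined_cumulative_modified_gaussian", "cumulative"):
    print(t, "->", route(t))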
@@ -665,11 +675,11 @@ def generate_final_geojson(
for key, calculation in calculations.items():
if not calculation.size:
feature["properties"][key] = None
feature["properties"][key + "_class"] = -1
feature["properties"][key + "_class"] = 0
continue
if np.isnan(calculation[i]):
feature["properties"][key] = None
feature["properties"][key + "_class"] = -1
feature["properties"][key + "_class"] = 0
continue
feature["properties"][key] = round(float(calculation[i]), 2)
feature["properties"][key + "_class"] = int(quantiles[key][i])
62 changes: 53 additions & 9 deletions app/api/src/schemas/heatmap.py
@@ -35,7 +35,8 @@ class HeatmapMode(Enum):


class HeatmapType(Enum):
gravity = "gravity"
modified_gaussian = "modified_gaussian"
combined_cumulative_modified_gaussian = "combined_cumulative_modified_gaussian"
connectivity = "connectivity"
cumulative = "cumulative"
closest_average = "closest_average"
@@ -72,6 +73,10 @@ class HeatmapConfigGravity(HeatmapBase):
sensitivity: int


class HeatmapConfigCombinedGravity(HeatmapConfigGravity):
static_traveltime: int


class HeatmapClosestAverage(HeatmapBase):
max_count: int
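
HeatmapConfigCombinedGravity only adds the required static_traveltime field on top of the gravity config, and heatmap_config_schema (below) selects it whenever heatmap_type is combined_cumulative_modified_gaussian. A minimal pydantic sketch of what that validation enforces for one POI entry; HeatmapBase's fields (weight, max_traveltime) are an assumption here and are not taken from this diff:

from pydantic import BaseModel

class HeatmapBase(BaseModel):
    # Assumed shared fields, reconstructed from the request examples.
    weight: int
    max_traveltime: int

class HeatmapConfigGravity(HeatmapBase):
    sensitivity: int

class HeatmapConfigCombinedGravity(HeatmapConfigGravity):
    static_traveltime: int

# A combined entry must now also provide static_traveltime:
entry = {"weight": 1, "sensitivity": 250000, "max_traveltime": 20, "static_traveltime": 5}
print(HeatmapConfigCombinedGravity(**entry))  # validated model, including static_traveltime=5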

@@ -107,7 +112,7 @@ class HeatmapSettings(BaseModel):
)
analysis_unit_size: Optional[int] = Field(10, description="Size of the analysis")
heatmap_type: HeatmapType = Field(
HeatmapType.gravity, description="Type of heatmap to compute"
HeatmapType.modified_gaussian, description="Type of heatmap to compute"
)
heatmap_config: dict

@@ -117,7 +122,8 @@ def heatmap_config_schema(cls, value, values):
Validate each part of heatmap_config against validator class corresponding to heatmap_type
"""
validator_classes = {
"gravity": HeatmapConfigGravity,
"modified_gaussian": HeatmapConfigGravity,
"combined_cumulative_modified_gaussian": HeatmapConfigCombinedGravity,
"closest_average": HeatmapClosestAverage,
}

@@ -150,7 +156,7 @@ def pass_poi_to_heatmap_config(cls, value):
}

request_examples = {
"gravity_hexagon_10": {
"modified_gaussian_hexagon_10": {
"summary": "Gravity heatmap with hexagon resolution 10",
"value": {
"mode": "walking",
@@ -161,7 +167,7 @@ def pass_poi_to_heatmap_config(cls, value):
"id": 1,
"name": "default",
},
"heatmap_type": "gravity",
"heatmap_type": "modified_gaussian",
"analysis_unit": "hexagon",
"resolution": 10,
"heatmap_config": {
@@ -173,7 +179,7 @@ def pass_poi_to_heatmap_config(cls, value):
},
},
},
"gravity_hexagon_9": {
"modified_gaussian_hexagon_9": {
"summary": "Gravity heatmap with hexagon resolution 9",
"value": {
"mode": "walking",
@@ -184,7 +190,7 @@ def pass_poi_to_heatmap_config(cls, value):
"id": 1,
"name": "default",
},
"heatmap_type": "gravity",
"heatmap_type": "modified_gaussian",
"analysis_unit": "hexagon",
"resolution": 9,
"heatmap_config": {
@@ -196,7 +202,7 @@ def pass_poi_to_heatmap_config(cls, value):
},
},
},
"gravity_hexagon_6": {
"modified_gaussian_hexagon_6": {
"summary": "Gravity heatmap with hexagon resolution 6",
"value": {
"mode": "walking",
@@ -207,7 +213,7 @@ def pass_poi_to_heatmap_config(cls, value):
"id": 1,
"name": "default",
},
"heatmap_type": "gravity",
"heatmap_type": "modified_gaussian",
"analysis_unit": "hexagon",
"resolution": 6,
"heatmap_config": {
@@ -219,6 +225,44 @@ def pass_poi_to_heatmap_config(cls, value):
},
},
},
"combined_modified_gaussian_hexagon_6": {
"summary": "Combined Gravity heatmap with hexagon resolution 6",
"value": {
"mode": "walking",
"study_area_ids": [91620000],
"max_travel_time": 20,
"walking_profile": "standard",
"scenario": {
"id": 1,
"name": "default",
},
"heatmap_type": "combined_cumulative_modified_gaussian",
"analysis_unit": "hexagon",
"resolution": 6,
"heatmap_config": {
"poi": {
"atm": {
"weight": 1,
"sensitivity": 250000,
"max_traveltime": 20,
"static_traveltime": 5,
},
"bar": {
"weight": 1,
"sensitivity": 250000,
"max_traveltime": 20,
"static_traveltime": 5,
},
"gym": {
"weight": 1,
"sensitivity": 350000,
"max_traveltime": 20,
"static_traveltime": 5,
},
},
},
},
},
"closest_average_hexagon_10": {
"summary": "Closest average heatmap with hexagon resolution 10",
"value": {
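
For reference, the new combined_cumulative_modified_gaussian example above can be sent like any other heatmap request; the sketch below uses httpx, and the endpoint path and auth header are placeholders rather than values taken from this PR:

import httpx

# Payload taken from the combined_modified_gaussian_hexagon_6 request example,
# trimmed to a single POI category for brevity.
payload = {
    "mode": "walking",
    "study_area_ids": [91620000],
    "max_travel_time": 20,
    "walking_profile": "standard",
    "scenario": {"id": 1, "name": "default"},
    "heatmap_type": "combined_cumulative_modified_gaussian",
    "analysis_unit": "hexagon",
    "resolution": 6,
    "heatmap_config": {
        "poi": {
            "atm": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20, "static_traveltime": 5},
        }
    },
}

# NOTE: URL and token are placeholders, not part of this PR.
response = httpx.post(
    "http://localhost:8000/api/v1/indicators/heatmap",
    json=payload,
    headers={"Authorization": "Bearer <token>"},
)
print(response.status_code)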