nearly there :)

parent 8b0a3d5b
......@@ -86,6 +86,23 @@ Would overpass be able to do this?
### Running Dutch municipality merge of 2018
Get the latest dataset from the server and copy it to your development environment
```
# DEVELOPMENT ONLY: failmap create_dataset -o -> dataset_12mar2018.json
```
Place the dataset in the fixture folder.
Then flush the database:
```
# DEVELOPMENT ONLY: failmap clear_database
```
Load the data into the application
```
# DEVELOPMENT ONLY: failmap load_dataset dataset_12mar2018.json
```
First make sure the organizations have been created and merged.
```
......@@ -94,7 +111,12 @@ failmap merge_organizations_2018
Then import the new regions for these organizations:
```
failmap update_coordinates
failmap update_coordinates --date=2018-01-01
```
The new organizations (of course) do not have a rating, the rating needs to be rebuilt:
```
failmap rebuild_ratings
```
......
......@@ -25,7 +25,7 @@ resampling_resolutions = {
}
@transaction.atomic
def update_coordinates(country: str = "NL", organization_type: str="municipality"):
def update_coordinates(country: str = "NL", organization_type: str="municipality", when=None):
log.info("Attempting to update coordinates for: %s %s " % (country, organization_type))
......@@ -38,18 +38,18 @@ def update_coordinates(country: str = "NL", organization_type: str="municipality
log.info("Parsing features:")
for feature in data["features"]:
if "properties" not in feature.keys():
if "properties" not in feature:
log.debug("Feature misses 'properties' property :)")
continue
if "name" not in feature["properties"].keys():
if "name" not in feature["properties"]:
log.debug("This feature does not contain a name: it might be metadata or something else.")
continue
resolution = resampling_resolutions.get(country, {}).get(organization_type, 0.001)
task = (resample.s(feature, resolution) | store_updates.s(country, organization_type))
task = (resample.s(feature, resolution) | store_updates.s(country, organization_type, when))
task.apply_async()
log.info("Resampling and update tasks have been created.")
......@@ -85,7 +85,7 @@ def resample(feature: Dict, resampling_resolution: float=0.001):
@app.task
def store_updates(feature: Dict, country: str="NL", organization_type: str="municipality"):
def store_updates(feature: Dict, country: str="NL", organization_type: str="municipality", when=None):
properties = feature["properties"]
coordinates = feature["geometry"]
......@@ -114,12 +114,10 @@ def store_updates(feature: Dict, country: str="NL", organization_type: str="muni
type__name=organization_type,
is_dead=False)
except Organization.DoesNotExist:
log.info("Organization from OSM does not exist in failmap, create it using the admin interface: '%s'" %
properties["name"])
log.info("This might happen with neighboring countries (and the antilles for the Netherlands) or new regions.")
log.info("If you are missing regions: did you create them in the admin interface or with an organization "
"merge script?")
log.info("Developers might experience this error using testdata etc.")
log.info("Organization from OSM does not exist in failmap, create it using the admin interface: '%s' "
"This might happen with neighboring countries (and the antilles for the Netherlands) or new regions."
"If you are missing regions: did you create them in the admin interface or with an organization "
"merge script? Developers might experience this error using testdata etc.", properties["name"])
log.info(properties)
return
......@@ -155,12 +153,12 @@ def store_updates(feature: Dict, country: str="NL", organization_type: str="muni
for old_coord in old_coordinate:
old_coord.is_dead = True
old_coord.is_dead_since = datetime.now(pytz.utc)
old_coord.is_dead_since = when if when else datetime.now(pytz.utc)
old_coord.is_dead_reason = message
old_coord.save()
new_coordinate = Coordinate()
new_coordinate.created_on = datetime.now(pytz.utc)
new_coordinate.created_on = when if when else datetime.now(pytz.utc)
new_coordinate.organization = matching_organization
new_coordinate.creation_metadata = "Automated import via OSM."
new_coordinate.geojsontype = coordinates["type"] # polygon or multipolygon
......@@ -180,7 +178,7 @@ def get_osm_data(country: str= "NL", organization_type: str= "municipality"):
:return: dictionary
"""
filename = "%s_%s_%s.osm" % (country, organization_type, datetime.now().date())
filename = "%s_%s_%s.osm" % (country, organization_type, datetime.now(pytz.utc).date())
filename = settings.TOOLS['openstreetmap']['output_dir'] + filename
# to test this, without connecting to a server but handle the data returned today(!)
......
......@@ -2,6 +2,9 @@ import logging
from django.core.management.base import BaseCommand
from datetime import datetime
from argparse import ArgumentTypeError
from ...geojson import update_coordinates
log = logging.getLogger(__package__)
......@@ -10,8 +13,21 @@ log = logging.getLogger(__package__)
class Command(BaseCommand):
help = "Connects to OSM and gets a set of coordinates."
def add_arguments(self, parser):
parser.add_argument("--date",
help="Date since when the import should be effective. - format YYYY-MM-DD",
required=False,
type=valid_date)
# https://nl.wikipedia.org/wiki/Gemeentelijke_herindelingen_in_Nederland#Komende_herindelingen
# Running this every month is fine too :)
def handle(self, *app_labels, **options):
# trace = input()
update_coordinates()
update_coordinates(when=options["date"])
def valid_date(s):
try:
return datetime.strptime(s, "%Y-%m-%d")
except ValueError:
msg = "Not a valid date: '{0}'.".format(s)
raise ArgumentTypeError(msg)
......@@ -700,9 +700,9 @@ def map_data(request, weeks_back=0):
(SELECT id as stacked_organization_id
FROM organization stacked_organization
WHERE (stacked_organization.created_on <= '%s' AND stacked_organization.is_dead == 0)
OR
(stacked_organization.created_on <= '%s' AND stacked_organization.is_dead == 1 AND
stacked_organization.is_dead_since >= '%s')) as organization_stack
OR (
'%s' BETWEEN stacked_organization.created_on AND stacked_organization.is_dead_since
AND stacked_organization.is_dead == 1)) as organization_stack
ON organization_stack.stacked_organization_id = map_organizationrating.organization_id
INNER JOIN
......@@ -716,9 +716,8 @@ def map_data(request, weeks_back=0):
FROM coordinate stacked_coordinate
WHERE (stacked_coordinate.created_on <= '%s' AND stacked_coordinate.is_dead == 0)
OR
(stacked_coordinate.created_on <= '%s' AND stacked_coordinate.is_dead == 1 AND
stacked_coordinate.is_dead_since >= '%s')
GROUP BY area) as coordinate_stack
('%s' BETWEEN stacked_coordinate.created_on AND stacked_coordinate.is_dead_since
AND stacked_coordinate.is_dead == 1) GROUP BY area, organization_id) as coordinate_stack
ON coordinate_stack.organization_id = map_organizationrating.organization_id
INNER JOIN
......@@ -728,7 +727,7 @@ def map_data(request, weeks_back=0):
GROUP BY coordinate_stack.area, organization.name
ORDER BY `when` ASC
''' % (when, when, when, when, when, when, when, )
''' % (when, when, when, when, when, )
print(sql)
# with the new solution, you only get just ONE area result per organization... -> nope, group by area :)
......
......@@ -146,7 +146,7 @@ class OrganizationAdmin(ActionMixin, ImportExportModelAdmin, admin.ModelAdmin):
list_display = ('name_details', 'type', 'country', 'created_on', 'is_dead')
search_fields = (['name', 'country', 'type__name'])
list_filter = ('name', 'type__name', 'country') # todo: type is now listed as name, confusing
fields = ('name', 'type', 'country', 'twitter_handle', 'is_dead', 'is_dead_since', 'is_dead_reason')
fields = ('name', 'type', 'country', 'twitter_handle', 'created_on', 'is_dead', 'is_dead_since', 'is_dead_reason')
inlines = [UrlAdminInline, CoordinateAdminInline, OrganizationRatingAdminInline, PromiseAdminInline] #
......@@ -360,12 +360,11 @@ class OrganizationTypeAdmin(ImportExportModelAdmin, admin.ModelAdmin):
fields = ('name', )
class CoordinateAdmin(admin.ModelAdmin):
list_display = ('organization', 'geojsontype', 'created_on', 'is_dead', 'is_dead_since')
search_fields = ('organization__name', 'geojsontype')
list_filter = ('organization', 'geojsontype')
fields = ('organization', 'geojsontype', 'area')
fields = ('organization', 'created_on', 'is_dead', 'is_dead_since', 'is_dead_reason', 'geojsontype', 'area')
class PromiseAdmin(ImportExportModelAdmin, admin.ModelAdmin):
......
This diff is collapsed.
......@@ -32,19 +32,32 @@ class Command(DumpDataCommand):
"""
De gemeenten Menaldumadeel, Franekeradeel en Het Bildt zullen opgaan in een nieuwe gemeente Waadhoeke.
"""
merge(["Menaldumadeel", "Franekeradeel", "Het Bildt"], "Waadhoeke", merge_date)
# We use the Frisian name Menameradiel instead of Menaldumadeel.
# we should probably change that here, so the update_coordinates also works.
merge(source_organizations_names=["Menameradiel", "Franekeradeel", "Het Bildt"],
target_organization_name="Waadhoeke",
when=merge_date,
organization_type="municipality",
country="NL"
)
"""
Ook de dorpen Welsrijp, Winsum, Baijum en Spannum van gemeente Littenseradiel,
sluiten zich bij deze nieuwe gemeente aan.
"""
# todo: do a geographic move. This is done via "update_coordinates".
# todo: do a geographic move. This is done via "update_coordinates"
# todo: add the geographic updates using update_coordinates on a certain date...
"""
De gemeenteraad van Leeuwarderadeel heeft na een referendum in maart 2013 besloten dat de gemeente zal
opgaan in de gemeente Leeuwarden.
"""
merge(["Leeuwarderadeel"], "Leeuwarden", merge_date)
merge(source_organizations_names=["Leeuwarderadeel"],
target_organization_name="Leeuwarden",
when=merge_date,
organization_type="municipality",
country="NL")
"""
Ook zullen tien dorpen van Littenseradiel aan de nieuwe fusiegemeente toegevoegd worden.
......@@ -60,14 +73,22 @@ class Command(DumpDataCommand):
De gemeenten Rijnwaarden en Zevenaar hebben in mei 2016 besloten om te fuseren tot de nieuwe gemeente Zevenaar.
"""
# straightforward geographic move
merge(["Rijnwaarden"], "Zevenaar", merge_date)
merge(source_organizations_names=["Rijnwaarden"],
target_organization_name="Zevenaar",
when=merge_date,
organization_type="municipality",
country="NL")
"""
De gemeenten Vlagtwedde en Bellingwedde hebben in oktober 2015 besloten om te fuseren tot de
nieuwe gemeente Westerwolde.
"""
# straightforward geographic move
merge(["Vlagtwedde", "Bellingwedde"], "Westerwolde", merge_date)
merge(source_organizations_names=["Vlagtwedde", "Bellingwedde"],
target_organization_name="Westerwolde",
when=merge_date,
organization_type="municipality",
country="NL")
"""
De gemeenten Hoogezand-Sappemeer, Menterwolde en Slochteren hebben in november 2015 besloten om te fuseren
......@@ -75,7 +96,11 @@ class Command(DumpDataCommand):
"""
# straightforward geographic move
merge(["Hoogezand-Sappemeer", "Menterwolde", "Slochteren"], "Midden-Groningen", merge_date)
merge(source_organizations_names=["Hoogezand-Sappemeer", "Menterwolde", "Slochteren"],
target_organization_name="Midden-Groningen",
when=merge_date,
organization_type="municipality",
country="NL")
# implies that name + country + organization_type is unique.
......@@ -177,18 +202,32 @@ def merge(source_organizations_names: List[str], target_organization_name: str,
for source_organizations_name in source_organizations_names:
log.info("Trying to add %s to the merge with %s." % (source_organizations_name, new_organization))
source_organization = Organization.objects.get(
name=source_organizations_name,
country=country,
type=type,
is_dead=False)
try:
source_organization = Organization.objects.get(
name=source_organizations_name,
country=country,
type=type,
is_dead=False)
except Organization.DoesNotExist:
# New organizations don't exist... so nothing to migrate.
log.exception("Organization %s does not exist. Tried to merge it with %s. Are you using a different "
"translation for this organization (for example a local dialect)?",
source_organizations_name, target_organization_name)
# copy todays used coordinates from all to-be-merged organizations into the target
for coordinate in Coordinate.objects.all().filter(organization=source_organization, is_dead=False):
cloned_coordinate = deepcopy(coordinate)
cloned_coordinate.id = None
cloned_coordinate.created_on = when
cloned_coordinate.organization = new_organization
cloned_coordinate.save()
# should the original coordinate die now? As it has been superseded.
# Would otherwise retrieve multiple coordinates.
coordinate.is_dead = True
coordinate.is_dead_since = when
coordinate.is_dead_reason = "Merged with %s" % new_organization
# still active promises
for promise in Promise.objects.all().filter(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment