
Commit 19a7d69

Merge remote-tracking branch 'frodrigo/master'

* frodrigo/master:
  Fix downloader in case of exception
  Fix tests on action table
  Stop psql script on error
  Allow 'snc' as housenumber in Italy in TagFix_Housenumber
  py3: analyser_merge_restaurant_FR_aquitaine
  py3: fix Analyser_Merge
  Update opendata analyser_merge_bicycle_parking_FR_bordeaux
  Switch data format to geojson in analyser_merge_bicycle_rental_FR_IDF
  Add mapcss plugin Josm_FranceSpecificRules #497
  mapcss: rename assertMatchWithContext to -osmoseAssertMatchWithContext
  Fix analyser_merge_school_FR for py2
2 parents 55bed1a + c434a74 · commit 19a7d69

17 files changed: +155 −37 lines

analysers/Analyser_Merge.py

Lines changed: 4 additions & 4 deletions

@@ -445,15 +445,15 @@ def __init__(self, source, extractor = lambda json: json):
         self.json = None
 
     def header(self):
-        self.json = map(flattenjson, self.extractor(json.loads(self.source.open().read())))
+        self.json = list(map(flattenjson, self.extractor(json.loads(self.source.open().read()))))
         return self.json[0].keys()
 
     def import_(self, table, srid, osmosis):
         self.json = self.json or map(flattenjson, self.extractor(json.loads(self.source.open().read())))
         for row in self.json:
             osmosis.giscurs.execute(u"insert into \"%s\" (\"%s\") values (%s)" %
                 (table, u'", "'.join(row.keys()), (u'%s, ' * len(row.keys()))[:-2]),
-                map(removequotesjson, row.values()))
+                list(map(removequotesjson, row.values())))
 
 class GeoJSON(Parser):
     def __init__(self, source, extractor = lambda json: json):
@@ -469,7 +469,7 @@ def __init__(self, source, extractor = lambda json: json):
 
     def header(self):
         self.json = self.extractor(json.loads(self.source.open().read()))
-        columns = flattenjson(self.json['features'][0]['properties']).keys()
+        columns = list(flattenjson(self.json['features'][0]['properties']).keys())
         columns.append(u"geom_x")
         columns.append(u"geom_y")
         return columns
@@ -479,7 +479,7 @@ def import_(self, table, srid, osmosis):
         insert_statement = u"insert into %s (%%s) values %%s" % table
         for row in self.json['features']:
             row['properties'] = flattenjson(row['properties'])
-            columns = row['properties'].keys()
+            columns = list(row['properties'].keys())
             values = map(removequotesjson, map(lambda column: row['properties'][column], columns))
             columns.append(u"geom_x")
             columns.append(u"geom_y")

analysers/analyser_merge_bicycle_parking_FR_bordeaux.py

Lines changed: 7 additions & 8 deletions

@@ -28,14 +28,13 @@ def __init__(self, config, logger = None):
         self.missing_official = {"item":"8150", "class": 1, "level": 3, "tag": ["merge", "public equipment", "cycle"], "desc": T_(u"Bordeaux bicycle parking not integrated") }
         Analyser_Merge.__init__(self, config, logger,
             u"http://opendata.bordeaux.fr/content/mobiliers-urbains-stationnement-2roues",
-            u"Mobiliers urbains : Stationnement vélo",
-            CSV(Source(attribution = u"Ville de Bordeaux", millesime = "01/2016",
-                    fileUrl = u"http://opendatabdx.cloudapp.net/DataBrowser/DownloadCsv?container=databordeaux&entitySet=sigstavelo&filter=NOFILTER"),
-                separator = u";"),
-            Load("X_LONG", "Y_LAT",
+            u"Mobiliers urbains : Stationnement deux-roues",
+            CSV(Source(attribution = u"Ville de Bordeaux", millesime = "01/2019",
+                    fileUrl = u"http://opendatabdx.cloudapp.net/DataBrowser/DownloadCsv?container=databordeaux&entitySet=sigstavelo&filter=NOFILTER")),
+            Load("x_long", "y_lat",
                 select = {
-                    "REALISATION": u"Réalisé",
-                    "NATURE": [u"Arceau vélo", u"Rack", u"Potelet"]},
+                    "realisation": u"Réalisé",
+                    "nature": [u"Arceau vélo", u"Rack", u"Potelet"]},
                 xFunction = self.float_comma,
                 yFunction = self.float_comma),
             Mapping(
@@ -46,4 +45,4 @@ def __init__(self, config, logger = None):
                 generate = Generate(
                     static1 = {"amenity": "bicycle_parking"},
                     static2 = {"source": self.source},
-                    mapping1 = {"capacity": lambda res: None if res["NOMBRE"] in (None, "0") else res["NOMBRE"] if res["NATURE"] == "Rack" else str(int(res["NOMBRE"])*2)} )))
+                    mapping1 = {"capacity": lambda res: None if res["nombre"] in (None, "0") else res["nombre"] if res["nombre"] == "Rack" else str(int(res["nombre"])*2)} )))

analysers/analyser_merge_bicycle_rental_FR_IDF.py

Lines changed: 6 additions & 6 deletions

@@ -20,7 +20,7 @@
 ## ##
 ###########################################################################
 
-from .Analyser_Merge import Analyser_Merge, Source, SHP, Load, Mapping, Select, Generate
+from .Analyser_Merge import Analyser_Merge, Source, GeoJSON, Load, Mapping, Select, Generate
 
 
 class Analyser_Merge_Bicycle_Rental_FR_IDF(Analyser_Merge):
@@ -31,9 +31,9 @@ def __init__(self, config, logger = None):
         Analyser_Merge.__init__(self, config, logger,
             u"https://opendata.paris.fr/explore/dataset/velib-disponibilite-en-temps-reel/information/",
             u"Vélib' - Disponibilité temps réel",
-            SHP(Source(attribution = u"Autolib Velib Métropole", millesime = "04/2019",
-                    fileUrl = u"https://opendata.paris.fr/explore/dataset/velib-disponibilite-en-temps-reel/download/?format=shp&timezone=Europe/Berlin", zip = "velib-disponibilite-en-temps-reel.shp")),
-            Load(("ST_X(geom)", ), ("ST_Y(geom)", )),
+            GeoJSON(Source(attribution = u"Autolib Velib Métropole", millesime = "04/2019",
+                    fileUrl = u"https://opendata.paris.fr/explore/dataset/velib-disponibilite-en-temps-reel/download/?format=geojson&timezone=Europe/Berlin")),
+            Load("geom_x", "geom_y"),
             Mapping(
                 select = Select(
                     types = ["nodes", "ways"],
@@ -42,9 +42,9 @@ def __init__(self, config, logger = None):
                 generate = Generate(
                     static1 = {
                         "amenity": "bicycle_rental",
-                        "network": "Vélib’",
+                        "network": u"Vélib’",
                         "operator": "Smovengo"},
                     static2 = {"source": self.source},
                     mapping1 = {
-                        "name": "station_nam",
+                        "name": "station_name",
                         "capacity": lambda res: res["nbedock"] if res["nbedock"] != "0" else None} )))

analysers/analyser_merge_restaurant_FR_aquitaine.py

Lines changed: 1 addition & 1 deletion

@@ -35,7 +35,7 @@ def __init__(self, config, logger = None):
             Load("LON", "LAT",
                 select = {
                     'TYPRES': [u"Restaurant", u"Hôtel restaurant", u"Ferme auberge"],
-                    'CATRES': self.amenity_type.keys()},
+                    'CATRES': list(self.amenity_type.keys())},
                 xFunction = self.degree,
                 yFunction = self.degree),
             Mapping(

analysers/analyser_merge_school_FR.py

Lines changed: 1 addition & 1 deletion

@@ -56,7 +56,7 @@ def __init__(self, config, logger = None):
             u"https://www.data.gouv.fr/fr/datasets/adresse-et-geolocalisation-des-etablissements-denseignement-du-premier-et-second-degres-1/",
             u"Adresse et géolocalisation des établissements d'enseignement du premier et second degrés - " + officialName,
             CSV(Source(attribution = u"data.gouv.fr:Éducation Nationale", millesime = "03/2018",
-                    fileUrl = "https://data.education.gouv.fr/explore/dataset/fr-en-adresse-et-geolocalisation-etablissements-premier-et-second-degre/download?format=csv&timezone=Europe/Berlin&use_labels_for_header=true",
+                    fileUrl = u"https://data.education.gouv.fr/explore/dataset/fr-en-adresse-et-geolocalisation-etablissements-premier-et-second-degre/download?format=csv&timezone=Europe/Berlin&use_labels_for_header=true",
                    filter = lambda t: t.replace("Ecole", u"École").replace("Saint ", "Saint-").replace("Sainte ", "Sainte-").replace(u"élementaire", u"élémentaire")),
                separator = u";"),
             Load("Position", "Position",

mapcss/item_map.py

Lines changed: 5 additions & 1 deletion

@@ -1,6 +1,10 @@
 #-*- coding: utf-8 -*-
 item_map = \
-{'Rules_Brazilian-Specific': {'class': {'Brasil - Correções e melhorias': 9018006,
+{'FranceSpecificRules': {'class': {'a': 1},
+                         'item': 9999,
+                         'prefix': 'Josm_',
+                         'tags': []},
+ 'Rules_Brazilian-Specific': {'class': {'Brasil - Correções e melhorias': 9018006,
                               'Brasil - Verificar': 9018002,
                               'SAMU classificado de forma errada': 9018016,
                               'adicionar {0} ao {1}': 9018018,

mapcss/mapcss2osmose.py

Lines changed: 5 additions & 5 deletions

@@ -208,9 +208,9 @@ def pseudo_class_righthandtraffic(t, c):
     'fixDeleteObject': 4,
     # test
     'assertMatch': 5,
-    'assertMatchWithContext': 5,
+    '-osmoseAssertMatchWithContext': 5,
     'assertNoMatch': 5,
-    'assertNoMatchWithContext': 5,
+    '-osmoseAssertNoMatchWithContext': 5,
 }
 
 def rule_declarations_order(t, c):
@@ -449,7 +449,7 @@ def segregate_selectors_type(rules):
             out_rules[t].append(rule.copy())
             out_rules[t][-1]['selectors'] = out_selector[t]
             out_rules[t][-1]['declarations'] = list(filter(lambda d:
-                not d['property'] or not d['property'].startswith('assert') or
+                not d['property'] or not d['property'].startswith('assert') or not d['property'].startswith('-osmoseAssert') or
                 (d['value']['type'] == 'single_value' and d['value']['value']['value'].startswith(t)) or
                 (d['value']['type'] == 'declaration_value_function' and d['value']['params'][0]['value']['value'].startswith(t)),
                 out_rules[t][-1]['declarations']))
@@ -623,7 +623,7 @@ def to_p(t):
     elif t['property'] == 'fixDeleteObject':
         # raise NotImplementedError(t['property'])
         fix['fixRemove'] == "*keys" # TODO delete completly the objet in place of remove all tags
-    elif t['property'].startswith('assert'):
+    elif t['property'].startswith('assert') or t['property'].startswith('-osmoseAssert'):
         if t['value']['type'] == 'single_value':
             what, context = (to_p(t['value']), None)
         else: # It's a list (we hope so)
@@ -709,7 +709,7 @@ def build_tests(tests):
             o, kvs = okvs[0], list(map(lambda a: a[0] in '"\'' and a[0] == a[-1] and a[1:-1] or a, map(lambda a: a.replace('\\\"', '"').replace("\\\'", "'"), okvs[1:])))
             kvs = zip(kvs[0::2], kvs[1::2]) # kvs.slice(2)
             tags = dict(kvs)
-            test_code += ("self." + ("check_err" if test['type'].startswith('assertMatch') else "check_not_err") + "(" +
+            test_code += ("self." + ("check_err" if test['type'].startswith('assertMatch') or test['type'].startswith('-osmoseAssertMatch') else "check_not_err") + "(" +
                 "n." + o + "(data, {" + ', '.join(map(lambda kv: "u'" + kv[0].replace("'", "\\'") + "': u'" + kv[1].replace("'", "\\'") + "'", sorted(tags.items()))) + "}" + {'node': "", 'way': ", [0]", 'relation': ", []"}[o] + "), " +
                 "expected={'class': " + str(test['class']) + ", 'subclass': " + str(test['subclass']) + "})")
             out.append(test_code)
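
After the rename, the generated unit tests have to recognise both the plain JOSM spellings and the -osmose prefixed ones when deciding between a positive and a negative check. A tiny sketch of that dispatch, mirroring the condition in build_tests above (the helper name is hypothetical):

    def check_method(test_type):
        # "match" assertions expect the rule to fire, "no match" the opposite
        if test_type.startswith('assertMatch') or test_type.startswith('-osmoseAssertMatch'):
            return "check_err"
        return "check_not_err"

    assert check_method('-osmoseAssertMatchWithContext') == "check_err"
    assert check_method('-osmoseAssertNoMatchWithContext') == "check_not_err"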

modules/OsmOsisManager.py

Lines changed: 2 additions & 2 deletions

@@ -91,14 +91,14 @@ def osmosis_close(self):
     def psql_c(self, sql):
         cmd = ["psql"]
         cmd += self.db_psql_args
-        cmd += ["-c", sql]
+        cmd += ["-c", sql, "-v", "ON_ERROR_STOP=1"]
         self.logger.execute_out(cmd)
 
 
     def psql_f(self, script, cwd=None):
         cmd = ["psql"]
         cmd += self.db_psql_args
-        cmd += ["-f", script]
+        cmd += ["-f", script, "-v", "ON_ERROR_STOP=1"]
         self.logger.execute_out(cmd, cwd=cwd)
 
 
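
This is the "Stop psql script on error" part of the merge: without ON_ERROR_STOP, psql keeps running the remaining statements of a script after a failure and still exits with status 0, so the caller never notices. With the variable set, psql aborts on the first error and returns a non-zero status. A minimal subprocess sketch of the same invocation (the connection arguments are placeholders):

    import subprocess

    db_psql_args = ["-d", "osmose", "-U", "osmose"]   # placeholder connection args

    cmd = ["psql"] + db_psql_args + ["-f", "script.sql", "-v", "ON_ERROR_STOP=1"]
    subprocess.check_call(cmd)   # raises CalledProcessError if any statement fails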

modules/downloader.py

Lines changed: 1 addition & 0 deletions

@@ -85,6 +85,7 @@ def update_cache(url, delay, get=get):
         answer.raise_for_status()
 
     # write the file
+    outfile = None
     try:
         outfile = open(tmp_file, "wb")
         for data in answer.iter_content(chunk_size=None):
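
Binding outfile before the try block is the "Fix downloader in case of exception" change: if open() or anything before it raises, cleanup code that closes the handle would otherwise hit an unbound name instead of surfacing the original error. A minimal sketch of the pattern (names are placeholders):

    def write_download(tmp_file, chunks):
        outfile = None                 # bound even if open() fails
        try:
            outfile = open(tmp_file, "wb")
            for data in chunks:
                outfile.write(data)
        finally:
            if outfile is not None:    # safe: never an unbound name
                outfile.close()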

osmose_config.py

Lines changed: 2 additions & 0 deletions

@@ -56,12 +56,14 @@ class template_config:
         dir_scripts + "/osmosis/CreateFunctions.sql",
     ]
     osmosis_change_init_post_scripts = [  # Scripts to run on database initialisation
+        dir_scripts + "/osmosis/pgsimple_schema_0.6_action_drop.sql",
         dir_scripts + "/osmosis/osmosis-0.47/script/pgsnapshot_schema_0.6_action.sql",
     ]
     osmosis_change_post_scripts = [  # Scripts to run each time the database is updated
         dir_scripts + "/osmosis/CreateTouched.sql",
     ]
     osmosis_resume_init_post_scripts = [  # Scripts to run on database initialisation
+        dir_scripts + "/osmosis/pgsimple_schema_0.6_action_drop.sql",
         dir_scripts + "/osmosis/osmosis-0.47/script/pgsnapshot_schema_0.6_action.sql",
     ]
     osmosis_resume_post_scripts = [  # Scripts to run each time the database is updated

osmosis/pgsimple_schema_0.6_action_drop.sql (new file)

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+DROP TABLE IF EXISTS actions;

plugins/Josm_FranceSpecificRules.py

Lines changed: 63 additions & 0 deletions

@@ -0,0 +1,63 @@
+#-*- coding: utf-8 -*-
+from __future__ import unicode_literals
+import modules.mapcss_lib as mapcss
+import regex as re
+
+from plugins.Plugin import Plugin, with_options
+
+class Josm_FranceSpecificRules(Plugin):
+
+
+    def init(self, logger):
+        Plugin.init(self, logger)
+        tags = capture_tags = {}
+        self.errors[21600] = {'item': 2160, 'level': 3, 'tag': mapcss.list_(u'tag', u'railway'), 'desc': mapcss.tr(u'Tag gauge manquant sur rail')}
+
+
+
+    def way(self, data, tags, nds):
+        capture_tags = {}
+        keys = tags.keys()
+        err = []
+
+
+        # way[railway=rail][!gauge][inside("FR")]
+        if (u'railway' in keys):
+            match = False
+            if not match:
+                capture_tags = {}
+                try: match = (mapcss._tag_capture(capture_tags, 0, tags, u'railway') == mapcss._value_capture(capture_tags, 0, u'rail') and not mapcss._tag_capture(capture_tags, 1, tags, u'gauge') and mapcss.inside(self.father.config.options, u'FR'))
+                except mapcss.RuleAbort: pass
+            if match:
+                # -osmoseTags:list("tag","railway")
+                # -osmoseItemClassLevel:"2160/21600/3"
+                # throwWarning:tr("Tag gauge manquant sur rail")
+                # suggestAlternative:"gauge"
+                # -osmoseAssertNoMatchWithContext:list("way railway=disused","inside=FR")
+                # -osmoseAssertNoMatchWithContext:list("way railway=rail gauge=1435","inside=FR")
+                # -osmoseAssertMatchWithContext:list("way railway=rail","inside=FR")
+                err.append({'class': 21600, 'subclass': 0, 'text': mapcss.tr(u'Tag gauge manquant sur rail')})
+
+        return err
+
+
+from plugins.Plugin import TestPluginCommon
+
+
+class Test(TestPluginCommon):
+    def test(self):
+        n = Josm_FranceSpecificRules(None)
+        class _config:
+            options = {"country": None, "language": None}
+        class father:
+            config = _config()
+        n.father = father()
+        n.init(None)
+        data = {'id': 0, 'lat': 0, 'lon': 0}
+
+        with with_options(n, {'country': 'FR'}):
+            self.check_not_err(n.way(data, {u'railway': u'disused'}, [0]), expected={'class': 21600, 'subclass': 0})
+        with with_options(n, {'country': 'FR'}):
+            self.check_not_err(n.way(data, {u'gauge': u'1435', u'railway': u'rail'}, [0]), expected={'class': 21600, 'subclass': 0})
+        with with_options(n, {'country': 'FR'}):
+            self.check_err(n.way(data, {u'railway': u'rail'}, [0]), expected={'class': 21600, 'subclass': 0})

plugins/Name_Cadastre_FR.py

Lines changed: 9 additions & 3 deletions

@@ -10,7 +10,7 @@ class Name_Cadastre_FR(Plugin):
 
     def init(self, logger):
         Plugin.init(self, logger)
-        capture_tags = {}
+        tags = capture_tags = {}
         self.errors[50801] = {'item': 5080, 'level': 1, 'tag': mapcss.list_(u'name', u'fix:chair'), 'desc': mapcss.tr(u'Hamlet or Locality name suffix Nord, Sud, Est, Ouest, Centre should be removed from Cadastre name. Place should be integrated only once.')}
 
         self.re_422a87ff = re.compile(r'.+([Nn]ord|[Ss]ud$|[Ee]st|[Oo]uest|[Cc]entre)$')
@@ -37,8 +37,8 @@ def node(self, data, tags):
             if match:
                 # -osmoseItemClassLevel:"5080/50801/1"
                 # throwError:tr("Hamlet or Locality name suffix Nord, Sud, Est, Ouest, Centre should be removed from Cadastre name. Place should be integrated only once.")
-                # assertNoMatchWithContext:list('node place=hamlet name="ZA Sud Loire"',"inside=FR")
-                # assertMatchWithContext:list('node place=hamlet name=Montdésert-Sud',"inside=FR")
+                # -osmoseAssertNoMatchWithContext:list('node place=hamlet name="ZA Sud Loire"',"inside=FR")
+                # -osmoseAssertMatchWithContext:list('node place=hamlet name=Montdésert-Sud',"inside=FR")
                 err.append({'class': 50801, 'subclass': 0, 'text': mapcss.tr(u'Hamlet or Locality name suffix Nord, Sud, Est, Ouest, Centre should be removed from Cadastre name. Place should be integrated only once.')})
 
         return err
@@ -64,6 +64,8 @@ def way(self, data, tags, nds):
             if match:
                 # -osmoseItemClassLevel:"5080/50801/1"
                 # throwError:tr("Hamlet or Locality name suffix Nord, Sud, Est, Ouest, Centre should be removed from Cadastre name. Place should be integrated only once.")
+                # -osmoseAssertNoMatchWithContext:list('node place=hamlet name="ZA Sud Loire"',"inside=FR")
+                # -osmoseAssertMatchWithContext:list('node place=hamlet name=Montdésert-Sud',"inside=FR")
                 err.append({'class': 50801, 'subclass': 0, 'text': mapcss.tr(u'Hamlet or Locality name suffix Nord, Sud, Est, Ouest, Centre should be removed from Cadastre name. Place should be integrated only once.')})
 
         return err
@@ -87,3 +89,7 @@ class father:
             self.check_not_err(n.node(data, {u'name': u'ZA Sud Loire', u'place': u'hamlet'}), expected={'class': 50801, 'subclass': 0})
         with with_options(n, {'country': 'FR'}):
             self.check_err(n.node(data, {u'name': u'Montdésert-Sud', u'place': u'hamlet'}), expected={'class': 50801, 'subclass': 0})
+        with with_options(n, {'country': 'FR'}):
+            self.check_not_err(n.node(data, {u'name': u'ZA Sud Loire', u'place': u'hamlet'}), expected={'class': 50801, 'subclass': 0})
+        with with_options(n, {'country': 'FR'}):
+            self.check_err(n.node(data, {u'name': u'Montdésert-Sud', u'place': u'hamlet'}), expected={'class': 50801, 'subclass': 0})

plugins/Name_Cadastre_FR.validator.mapcss

Lines changed: 2 additions & 2 deletions

@@ -38,6 +38,6 @@ way[place=hamlet][name=~/.+([Nn]ord|[Ss]ud$|[Ee]st|[Oo]uest|[Cc]entre)$/][inside
     throwError: tr("Hamlet or Locality name suffix Nord, Sud, Est, Ouest, Centre should be removed from Cadastre name. Place should be integrated only once.");
     -osmoseItemClassLevel: "5080/50801/1";
 
-    assertMatchWithContext: list('node place=hamlet name=Montdésert-Sud', "inside=FR");
-    assertNoMatchWithContext: list('node place=hamlet name="ZA Sud Loire"', "inside=FR");
+    -osmoseAssertMatchWithContext: list('node place=hamlet name=Montdésert-Sud', "inside=FR");
+    -osmoseAssertNoMatchWithContext: list('node place=hamlet name="ZA Sud Loire"', "inside=FR");
 }
