diff --git a/assets/map_symbols/footway_categories.png b/assets/map_symbols/footway_categories.png
new file mode 100644
index 0000000..30db5f7
Binary files /dev/null and b/assets/map_symbols/footway_categories.png differ
diff --git a/assets/map_symbols/lit.png b/assets/map_symbols/lit.png
index 5fde8ba..fa82d41 100644
Binary files a/assets/map_symbols/lit.png and b/assets/map_symbols/lit.png differ
diff --git a/assets/map_symbols/smoothness.png b/assets/map_symbols/smoothness.png
index 52e6652..453b467 100644
Binary files a/assets/map_symbols/smoothness.png and b/assets/map_symbols/smoothness.png differ
diff --git a/assets/map_symbols/surface.png b/assets/map_symbols/surface.png
index ef7128a..3957df3 100644
Binary files a/assets/map_symbols/surface.png and b/assets/map_symbols/surface.png differ
diff --git a/assets/map_symbols/tactile_paving.png b/assets/map_symbols/tactile_paving.png
index 16f34d8..23c4f1d 100644
Binary files a/assets/map_symbols/tactile_paving.png and b/assets/map_symbols/tactile_paving.png differ
diff --git a/assets/map_symbols/traffic_calming.png b/assets/map_symbols/traffic_calming.png
index c844235..f3b89f3 100644
Binary files a/assets/map_symbols/traffic_calming.png and b/assets/map_symbols/traffic_calming.png differ
diff --git a/assets/map_symbols/wheelchair.png b/assets/map_symbols/wheelchair.png
index 5386554..e0a1ec4 100644
Binary files a/assets/map_symbols/wheelchair.png and b/assets/map_symbols/wheelchair.png differ
diff --git a/constants.py b/constants.py
index d1ff7e6..811668a 100644
--- a/constants.py
+++ b/constants.py
@@ -1,5 +1,6 @@
import sys, os
-sys.path.append('.')
+
+sys.path.append(".")
from config import *
@@ -13,7 +14,7 @@
# global max zoom level
max_zoom = 22
-data_format = '.parquet'
+data_format = ".parquet"
# node archives general paths
map_page_name = "./map.html"
@@ -22,123 +23,134 @@
boundaries_path = "./data/boundaries" + data_format
boundaries_geojson_path = "./data/boundaries.geojson"
boundaries_md_path = "./data/boundaries_md.json"
-workflows_path = '.github/workflows'
+workflows_path = ".github/workflows"
# data folderpaths:
-improper_geoms_folderpath = 'data/improper_geoms'
-disjointed_folderpath = 'data/disjointed'
-versioning_folderpath = 'data/versioning'
-other_footways_folderpath = 'data/other_footways'
-tiles_folderpath = 'data/tiles'
-vrts_folderpath = 'data/vrts'
+improper_geoms_folderpath = "data/improper_geoms"
+disjointed_folderpath = "data/disjointed"
+versioning_folderpath = "data/versioning"
+other_footways_folderpath = "data/other_footways"
+tiles_folderpath = "data/tiles"
+vrts_folderpath = "data/vrts"
other_footways_subcatecories = {
- 'stairways' : {'highway':['steps']},
- 'main_footways' : {'highway':['footway','living_street'],'foot':['designated'],'footway': ['alley','path','yes']},
- 'potential_footways' : {'highway':['path','track']},
- 'informal_footways' : {'foot':['yes','permissive']},
- 'pedestrian_areas' : {} #defined only by geometry type (Polygon,Multipolygon)
+ "stairways": {"highway": ["steps"]},
+ "main_footways": {
+ "highway": ["footway", "living_street"],
+ "foot": ["designated"],
+ "footway": ["alley", "path", "yes"],
+ },
+ "potential_footways": {"highway": ["path", "track"]},
+ "informal_footways": {"foot": ["yes", "permissive"]},
+ "pedestrian_areas": {}, # defined only by geometry type (Polygon,Multipolygon)
}
# establishing other footways geometry types, default is 'LineString'
-other_footways_geometry_types = {k:'LineString' for k, v in other_footways_subcatecories.items()}
-other_footways_geometry_types['pedestrian_areas'] = 'Polygon'
+other_footways_geometry_types = {
+ k: "LineString" for k, v in other_footways_subcatecories.items()
+}
+other_footways_geometry_types["pedestrian_areas"] = "Polygon"
data_layer_descriptions = {
- 'kerbs' : 'Access points in the kerb lane where the sidewalk and the road meet, along a crossing.',
- 'sidewalks' : 'A footway that is juxtaposed to a road, a type of sidepath.',
- 'crossings' : 'The line that allows pedestrians to cross some road.',
- 'other_footways' : {
- 'stairways' : 'Pathways composed of steps.',
- 'main_footways' : 'Pathways which main usage is pedestrian displacement.',
- 'potential_footways' : 'Pathways with vague description, generally usable for pedestrians, but sometimes not as its main or sole purpose, such as some rural tracks.',
- 'informal_footways' : 'Pathways that are not made for pedestrian usage, but they generally used due to the absence of proper footways.',
- 'pedestrian_areas' : 'Areas where pedestrians can generally displace freely in normal circumstances.'
- }
+ "kerbs": "Access points in the kerb lane where the sidewalk and the road meet, along a crossing.",
+ "sidewalks": "A footway that is juxtaposed to a road, a type of sidepath.",
+ "crossings": "The line that allows pedestrians to cross some road.",
+ "other_footways": {
+ "stairways": "Pathways composed of steps.",
+        "main_footways": "Pathways whose main usage is pedestrian displacement.",
+        "potential_footways": "Pathways with a vague description, generally usable by pedestrians, though sometimes not as their main or sole purpose, such as some rural tracks.",
+        "informal_footways": "Pathways that are not made for pedestrian usage but are generally used due to the absence of proper footways.",
+        "pedestrian_areas": "Areas where pedestrians can generally move freely under normal circumstances.",
+ },
}
# ogr2ogr path
-OGR2OGR_PATH = 'ogr2ogr'
+OGR2OGR_PATH = "ogr2ogr"
layer_tags_dict = {
- 'kerbs': {'kerb': ['lowered','raised','flush','rolled','no','yes'], 'barrier': ['kerb']},
- 'sidewalks': {'footway': ['sidewalk']},
- 'crossings': {'footway': ['crossing']},
- 'other_footways' : OTHER_FOOTWAY_RULES
- }
+ "kerbs": {
+ "kerb": ["lowered", "raised", "flush", "rolled", "no", "yes"],
+ "barrier": ["kerb"],
+ },
+ "sidewalks": {"footway": ["sidewalk"]},
+ "crossings": {"footway": ["crossing"]},
+ "other_footways": OTHER_FOOTWAY_RULES,
+}
layer_exclusion_tags = {
- 'kerbs': {},
- 'sidewalks': {},
- 'crossings': {},
- 'other_footways' : OTHER_FOOTWAY_EXCLUSION_RULES,
+ "kerbs": {},
+ "sidewalks": {},
+ "crossings": {},
+ "other_footways": OTHER_FOOTWAY_EXCLUSION_RULES,
}
bbox_as_list = ()
# data paths
-sidewalks_path = 'data/sidewalks' + data_format
-crossings_path = 'data/crossings' + data_format
-kerbs_path = 'data/kerbs' + data_format
-other_footways_path = 'data/other_footways' + data_format
+sidewalks_path = "data/sidewalks" + data_format
+crossings_path = "data/crossings" + data_format
+kerbs_path = "data/kerbs" + data_format
+other_footways_path = "data/other_footways" + data_format
-sidewalks_path_raw = 'data/sidewalks_raw' + data_format
-crossings_path_raw = 'data/crossings_raw' + data_format
-kerbs_path_raw = 'data/kerbs_raw' + data_format
-other_footways_path_raw = 'data/other_footways_raw' + data_format
+sidewalks_path_raw = "data/sidewalks_raw" + data_format
+crossings_path_raw = "data/crossings_raw" + data_format
+kerbs_path_raw = "data/kerbs_raw" + data_format
+other_footways_path_raw = "data/other_footways_raw" + data_format
-sidewalks_path_versioning = 'data/versioning/sidewalks_versioning.json'
-crossings_path_versioning = 'data/versioning/crossings_versioning.json'
-kerbs_path_versioning = 'data/versioning/kerbs_versioning.json'
-other_footways_path_versioning = 'data/versioning/other_footways_versioning.json'
+sidewalks_path_versioning = "data/versioning/sidewalks_versioning.json"
+crossings_path_versioning = "data/versioning/crossings_versioning.json"
+kerbs_path_versioning = "data/versioning/kerbs_versioning.json"
+other_footways_path_versioning = "data/versioning/other_footways_versioning.json"
# data quality jsons path
-feat_keys_path = 'quality_check/feature_keys.json'
-keys_without_wiki_path = 'quality_check/keys_without_wiki.json'
-unique_values_path = 'quality_check/unique_tag_values.json'
-valid_values_path = 'quality_check/valid_tag_values.json'
+feat_keys_path = "quality_check/feature_keys.json"
+keys_without_wiki_path = "quality_check/keys_without_wiki.json"
+unique_values_path = "quality_check/unique_tag_values.json"
+valid_values_path = "quality_check/valid_tag_values.json"
# node homepage:
-user_basepage_url = f'https://{USERNAME}.github.io/'
-node_homepage_url = f'https://{USERNAME}.github.io/{REPO_NAME}/'
-data_folder_url = f'https://{USERNAME}.github.io/{REPO_NAME}/data/'
-data_updating_url = f'https://{USERNAME}.github.io/{REPO_NAME}/data/data_updating.html'
+user_basepage_url = f"https://{USERNAME}.github.io/"
+node_homepage_url = f"https://{USERNAME}.github.io/{REPO_NAME}/"
+data_folder_url = f"https://{USERNAME}.github.io/{REPO_NAME}/data/"
+data_updating_url = f"https://{USERNAME}.github.io/{REPO_NAME}/data/data_updating.html"
# codebase as page:
-codebase_homepage = 'https://kauevestena.github.io/oswm_codebase/'
+codebase_homepage = "https://kauevestena.github.io/oswm_codebase/"
paths_dict = {
- 'data' :{
- 'sidewalks': sidewalks_path,
- 'crossings': crossings_path,
- 'kerbs': kerbs_path,
- 'other_footways' : other_footways_path
+ "data": {
+ "sidewalks": sidewalks_path,
+ "crossings": crossings_path,
+ "kerbs": kerbs_path,
+ "other_footways": other_footways_path,
},
- 'data_raw' : {
- 'sidewalks': sidewalks_path_raw,
- 'crossings': crossings_path_raw,
- 'kerbs': kerbs_path_raw,
- 'other_footways' : other_footways_path_raw
+ "data_raw": {
+ "sidewalks": sidewalks_path_raw,
+ "crossings": crossings_path_raw,
+ "kerbs": kerbs_path_raw,
+ "other_footways": other_footways_path_raw,
},
- 'versioning' : {
- 'sidewalks': sidewalks_path_versioning,
- 'crossings': crossings_path_versioning,
- 'kerbs': kerbs_path_versioning,
- 'other_footways' : other_footways_path_versioning
+ "versioning": {
+ "sidewalks": sidewalks_path_versioning,
+ "crossings": crossings_path_versioning,
+ "kerbs": kerbs_path_versioning,
+ "other_footways": other_footways_path_versioning,
},
- 'other_footways_subcategories' : {},
- 'map_layers' : {
- 'sidewalks': sidewalks_path,
- 'crossings': crossings_path,
- 'kerbs': kerbs_path,
+ "other_footways_subcategories": {},
+ "map_layers": {
+ "sidewalks": sidewalks_path,
+ "crossings": crossings_path,
+ "kerbs": kerbs_path,
},
}
# paths for other_footways subcategories:
for subcategory in other_footways_subcatecories:
- subcategory_path = os.path.join(other_footways_folderpath, subcategory+data_format)
- paths_dict['other_footways_subcategories'][subcategory] = subcategory_path
- paths_dict['map_layers'][subcategory] = subcategory_path
+ subcategory_path = os.path.join(
+ other_footways_folderpath, subcategory + data_format
+ )
+ paths_dict["other_footways_subcategories"][subcategory] = subcategory_path
+ paths_dict["map_layers"][subcategory] = subcategory_path
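+
+# Illustrative result: with the defaults above this yields, for example,
+# "data/other_footways/stairways.parquet", registered both under
+# "other_footways_subcategories" and "map_layers" in paths_dict.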
# max radius to cut off unconnected crossings and kerbs
@@ -148,498 +160,426 @@
default_score = 0.5
fields_values_properties = {
- 'sidewalks':{
- 'surface': {
+ "sidewalks": {
+ "surface": {
        # colorscheme 12-class Set3 from colorbrewer (thx!!), available at:
# https://colorbrewer2.org/?type=qualitative&scheme=Set3&n=12
-
- 'asphalt':{
- 'score_default' : 100,
- 'color' : '#fb8072', #
+ "asphalt": {
+ "score_default": 100,
+ "color": "#fb8072", #
},
- 'concrete':{
- 'score_default' : 100,
- 'color' : '#80b1d3',
+ "concrete": {
+ "score_default": 100,
+ "color": "#80b1d3",
},
- 'concrete:plates':{
- 'score_default' : 70,
- 'color' : '#fccde5', #
+ "concrete:plates": {
+ "score_default": 70,
+ "color": "#fccde5", #
},
- 'paving_stones':{
- 'score_default' : 90,
- 'color' : '#bebada', #
+ "paving_stones": {
+ "score_default": 90,
+ "color": "#bebada", #
},
- 'sett':{
- 'score_default' : 60,
- 'color' : '#ffed6f', #
+ "sett": {
+ "score_default": 60,
+ "color": "#ffed6f", #
},
-
- 'cobblestone':{
- 'score_default' : 60,
- 'color' : '#ffed6f', #
+ "cobblestone": {
+ "score_default": 60,
+ "color": "#ffed6f", #
},
-
- 'unhewn_cobblestone':{
- 'score_default' : 50,
- 'color' : '#ffffb3', #black
+ "unhewn_cobblestone": {
+ "score_default": 50,
+                "color": "#ffffb3",  # pale yellow
},
-
- 'ground':{
- 'score_default' : 30,
- 'color' : '#fdb462' }, #
- 'dirt':{
- 'score_default' : 30,
- 'color' : '#fdb462' }, #
- 'earth':{
- 'score_default' : 30,
- 'color' : '#fdb462', #
+ "ground": {"score_default": 30, "color": "#fdb462"}, #
+ "dirt": {"score_default": 30, "color": "#fdb462"}, #
+ "earth": {
+ "score_default": 30,
+ "color": "#fdb462", #
},
- 'sand':{
- 'score_default' : 30,
- 'color' : '#fdb462', #
+ "sand": {
+ "score_default": 30,
+ "color": "#fdb462", #
},
- 'grass':{
- 'score_default' : 30,
- 'color' : '#b3de69', #
+ "grass": {
+ "score_default": 30,
+ "color": "#b3de69", #
},
# 'grass_paver':{
# 'score_default' : 3,
# 'color' : '#000000', #black
# },
-
- 'paved':{
- 'score_default' : 60, # equals to worst paved: sett
- 'color' : '#ffffff', # white
+ "paved": {
+ "score_default": 60, # equals to worst paved: sett
+ "color": "#ffffff", # white
},
- 'unpaved':{
- 'score_default' : 30,
- 'color' : '#d9d9d9', #
+ "unpaved": {
+ "score_default": 30,
+ "color": "#d9d9d9", #
},
-
# a sample for uncommon values:
-
- 'gravel':{
- 'score_default' : 30,
- 'color' : '#bc80bd', #
+ "gravel": {
+ "score_default": 30,
+ "color": "#bc80bd", #
},
-
- 'compacted':{
- 'score_default' : 30,
- 'color' : '#bc80bd', #
+ "compacted": {
+ "score_default": 30,
+ "color": "#bc80bd", #
},
-
-
- 'ceramic:tiles':{
- 'score_default' : 70,
- 'color' : '#bc80bd', #
+ "ceramic:tiles": {
+ "score_default": 70,
+ "color": "#bc80bd", #
},
-
- 'wood':{
- 'score_default' : 50,
- 'color' : '#bc80bd', #
+ "wood": {
+ "score_default": 50,
+ "color": "#bc80bd", #
},
-
- 'metal':{
- 'score_default' : 100,
- 'color' : '#bc80bd', #
+ "metal": {
+ "score_default": 100,
+ "color": "#bc80bd", #
},
-
# 'Petit_Pavê':{
# 'score_default' : 65,
# 'color' : '#bc80bd', #
# },
-
# for the filled ones:
- '?':{
- 'score_default' : 10,
- 'color' : '#434343', #
+ "?": {
+ "score_default": 10,
+ "color": "#434343", #
},
},
-
- 'wheelchair': {
- '?':{
- 'score_default' : 0, # equivalent to "very horrible"
- 'color' : '#434343', #
+ "wheelchair": {
+ "yes": {
+ "score_default": 0, # equivalent to "very horrible"
+ "color": "#91bfdb", #
},
-
- 'no':{
- 'score_default' : 0, # equivalent to "very horrible"
- 'color' : '#fc8d59', #
+ "designated": {
+ "score_default": 0, # equivalent to "very horrible"
+ "color": "#91bfdb", #
},
-
-
- 'limited':{
- 'score_default' : 0, # equivalent to "very horrible"
- 'color' : '#ffffbf', #
+ "limited": {
+ "score_default": 0, # equivalent to "very horrible"
+ "color": "#ffffbf", #
},
-
- 'yes':{
- 'score_default' : 0, # equivalent to "very horrible"
- 'color' : '#91bfdb', #
+ "no": {
+ "score_default": 0, # equivalent to "very horrible"
+ "color": "#fc8d59", #
},
-
- 'designated':{
- 'score_default' : 0, # equivalent to "very horrible"
- 'color' : '#91bfdb', #
+ "?": {
+ "score_default": 0, # equivalent to "very horrible"
+ "color": "#434343", #
},
-
-
-
-
-
},
-
-
- 'smoothness' : {
- # for absence:
- '?':{
- 'score_default' : 40, # equivalent to "very horrible"
- 'color' : '#434343', #
- },
-
+ "smoothness": {
# color scheme: ColorBrewer (thx!!) 11-class RdYlBu
-
# valid:
- 'excellent':{
- 'score_default' : 10,
- 'color' : '#4575b4', #
+ "excellent": {
+ "score_default": 10,
+ "color": "#4575b4", #
},
- 'good':{
- 'score_default' : 90,
- 'color' : '#abd9e9', #
+ "good": {
+ "score_default": 90,
+ "color": "#abd9e9", #
},
- 'intermediate':{
- 'score_default' : 70,
- 'color' : '#ffffbf', #
+ "intermediate": {
+ "score_default": 70,
+ "color": "#ffffbf", #
},
- 'bad':{
- 'score_default' : 50,
- 'color' : '#fdae61', #
+ "bad": {
+ "score_default": 50,
+ "color": "#fdae61", #
},
- 'very_bad':{
- 'score_default' : 40,
- 'color' : '#fdae61', #
+ "very_bad": {
+ "score_default": 40,
+ "color": "#fdae61", #
},
- 'horrible':{
- 'score_default' : 20,
- 'color' : '#f46d43', #
+ "horrible": {
+ "score_default": 20,
+ "color": "#f46d43", #
},
- 'very_horrible':{
- 'score_default' : 10,
- 'color' : '#f46d43', #
+ "very_horrible": {
+ "score_default": 10,
+ "color": "#f46d43", #
},
- 'impassable':{
- 'score_default' : 0,
- 'color' : '#a50026', #
+ "impassable": {
+ "score_default": 0,
+ "color": "#a50026", #
},
-
-
- # invalid values must be handled individually
+ # for absence:
+ "?": {
+ "score_default": 40, # equivalent to "very horrible"
+ "color": "#434343", #
+ },
+ # invalid values must be handled individually
},
- 'lit' : {
- '?':{
- 'score_default' : 10,
- 'color' : '#434343', #
+ "lit": {
+ "yes": {
+ "score_default": 10,
+ "color": "#ffff99", #
},
-
- 'yes':{
- 'score_default' : 10,
- 'color' : '#ffff99', #
+ "automatic": {
+ "score_default": 10,
+ "color": "#ffff99", #
},
- 'automatic':{
- 'score_default' : 10,
- 'color' : '#ffff99', #
+ "24/7": {
+ "score_default": 10,
+ "color": "#ffff99", #
},
- 'no':{
- 'score_default' : 10,
- 'color' : '#6a3d9a', #
+ "no": {
+ "score_default": 10,
+ "color": "#6a3d9a", #
},
- 'disused':{
- 'score_default' : 10,
- 'color' : '#6a3d9a', #
+ "disused": {
+ "score_default": 10,
+ "color": "#6a3d9a", #
},
- '24/7':{
- 'score_default' : 10,
- 'color' : '#ffff99', #
+ "?": {
+ "score_default": 10,
+ "color": "#434343", #
},
},
-
- 'width':{
- '?':{
- 'score_default' : 10,
- 'color' : '#434343', #
+ "width": {
+ "?": {
+ "score_default": 10,
+ "color": "#434343", #
},
# in a future...
},
- 'incline':{
- '?':{
- 'score_default' : 10,
- 'color' : '#434343', #
+ "incline": {
+ "?": {
+ "score_default": 10,
+ "color": "#434343", #
},
# in a future...
},
- 'tactile_paving':{
- #CHECK KERBS
+ "tactile_paving": {
+ # CHECK KERBS
},
- 'incline:across':{
- '?':{
- 'score_default' : 10,
- 'color' : '#434343', #
+ "incline:across": {
+ "?": {
+ "score_default": 10,
+ "color": "#434343", #
},
# in a future...
- }
+ },
},
-
- 'kerbs':{
- 'kerb':{
- 'raised':{
- 'score_default' : -30,
- 'color' : '#000000', #black
+ "kerbs": {
+ "kerb": {
+ "raised": {
+ "score_default": -30,
+ "color": "#000000", # black
},
- 'rolled':{
- 'score_default' : 0,
- 'color' : '#808080', #50% gray
+ "rolled": {
+ "score_default": 0,
+ "color": "#808080", # 50% gray
},
- 'no':{
- 'score_default' : 10,
- 'color' : '#bebebe', #75% hray
+ "no": {
+ "score_default": 10,
+                "color": "#bebebe",  # 75% gray
},
- 'lowered':{
- 'score_default' : 50,
- 'color' : '#ffffff', #white
+ "lowered": {
+ "score_default": 50,
+ "color": "#ffffff", # white
},
- 'flush':{
- 'score_default' : 60,
- 'color' : '#ffffff', #white
+ "flush": {
+ "score_default": 60,
+ "color": "#ffffff", # white
},
-
- '?':{
- 'score_default' : -10, # equivalent to "raised"
- 'color' : '#d9d9d9', #
+ "?": {
+ "score_default": -10, # equivalent to "raised"
+ "color": "#d9d9d9", #
},
-
},
- 'tactile_paving':{
- 'yes':{
- 'score_default' : 100,
- 'color' : '#6146d0',
-
- 'opacity' : 1,
- },
- 'contrasted':{
- 'score_default' : 100,
- 'color' : '#6146d0',
-
- 'opacity' : 1,
-
- },
- 'no':{
- 'score_default' : 0,
- 'color' : '#bd1006',
-
- 'opacity' : 0,
-
+ "tactile_paving": {
+ "yes": {
+ "score_default": 100,
+ "color": "#6146d0",
+ "opacity": 1,
+ },
+ "contrasted": {
+ "score_default": 100,
+ "color": "#6146d0",
+ "opacity": 1,
+ },
+ "no": {
+ "score_default": 0,
+ "color": "#bd1006",
+ "opacity": 0,
+ },
+ "?": {
+ "score_default": 0, # equivalent to "no"
+ "color": "#717171", # "#434343", #
+ "opacity": 0,
},
-
- '?':{
- 'score_default' : 0, # equivalent to "no"
- 'color' : "#717171",#"#434343", #
-
- 'opacity' : 0,
-
- },
-
- }
},
-
- 'crossings':{
+ },
+ "crossings": {
# default scores should be what was named "bonus"
- 'crossing': {
+ "crossing": {
# base color-scheme: ColorBrewer (thx!!) 12-class Paired
- 'no':{
+ "no": {
# 'score_default' : 0,
# 'bonus' : -100,
- 'score_default' : -100,
-
- 'dasharray' :"0",
- 'dashoffset': '0',
-
- 'color' : '#e31a1c', # RED
-
+ "score_default": -100,
+ "dasharray": "0",
+ "dashoffset": "0",
+ "color": "#e31a1c", # RED
},
- 'unmarked':{
+ "unmarked": {
# 'score_default' : 70,
# 'bonus' : 0,
- 'score_default' : 0,
-
+ "score_default": 0,
# may get help on: https://gigacore.github.io/demos/svg-stroke-dasharray-generator/
-
- 'dasharray' :"5,10",
- 'dashoffset': '0',
-
-
- 'color' : '#ffff99',
+ "dasharray": "5,10",
+ "dashoffset": "0",
+ "color": "#ffff99",
},
- 'marked':{
+ "marked": {
# 'score_default' : 90,
# 'bonus' : 20,
-
- 'score_default' : 20,
-
- 'dasharray' :"0",
- 'dashoffset': '0',
-
-
- 'color' : '#a6cee3',
+ "score_default": 20,
+ "dasharray": "0",
+ "dashoffset": "0",
+ "color": "#a6cee3",
},
-
- 'zebra':{
+ "zebra": {
# 'score_default' : 90,
# 'bonus' : 20,
-
- 'score_default' : 20,
-
- 'dasharray' :"0",
- 'dashoffset': '0',
-
-
- 'color' : '#a6cee3',
+ "score_default": 20,
+ "dasharray": "0",
+ "dashoffset": "0",
+ "color": "#a6cee3",
},
-
- 'uncontrolled':{
+ "uncontrolled": {
# 'score_default' : 100,
# 'bonus' : 30,
-
- 'score_default' : 30,
-
- 'dasharray' :"0",
- 'dashoffset': '0',
-
-
- 'color' : '#a6cee3',
+ "score_default": 30,
+ "dasharray": "0",
+ "dashoffset": "0",
+ "color": "#a6cee3",
},
-
- 'traffic_signals':{
+ "traffic_signals": {
# 'score_default' : 100,
# 'bonus' : 30,
-
- 'score_default' : 30,
-
- 'dasharray' :"0",
- 'dashoffset': '0',
-
- 'color' : '#1f78b4',
+ "score_default": 30,
+ "dasharray": "0",
+ "dashoffset": "0",
+ "color": "#1f78b4",
},
-
- '?':{
+ "?": {
# 'score_default' : 10,
# 'bonus' : 0,
-
- 'score_default' : 0,
-
- 'dasharray' :"0",
- 'dashoffset': '0',
-
-
- 'color' : 'gray', #
+ "score_default": 0,
+ "dasharray": "0",
+ "dashoffset": "0",
+ "color": "gray", #
},
-
-
},
- 'surface':{
+ "surface": {
# CHECK SIDEWALKS
-
},
- 'smoothness':{
+ "smoothness": {
# CHECK SIDEWALKS
-
-
},
- 'traffic_calming':{
- 'table':{
+ "traffic_calming": {
+ "table": {
# 'score_default' : 100,
- 'score_default' : 20,
-
+ "score_default": 20,
# 'bonus' : 20,
- 'color' : '#ffff99',
+ "color": "#ffff99",
},
-
- 'bump':{
+ "bump": {
# 'score_default' : 100,
- 'score_default' : 20,
-
+ "score_default": 20,
# 'bonus' : 20,
- 'color' : '#ffff99',
+ "color": "#ffff99",
},
-
- 'hump':{
+ "hump": {
# 'score_default' : 100,
- 'score_default' : 20,
-
+ "score_default": 20,
# 'bonus' : 20,
- 'color' : '#ffff99',
+ "color": "#ffff99",
},
-
- '?':{
- 'score_default' : 0,
- 'color' : '#63636399', #
+ "?": {
+ "score_default": 0,
+ "color": "#63636399", #
},
-
- }
-}
+ },
+ },
}
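+# Note (illustrative): each leaf entry maps an OSM tag value to a default score and a
+# display color (plus optional style keys such as "opacity" or "dasharray"); the "?"
+# entry is the fallback for features where the tag is missing. These records are read
+# by helpers such as get_score_df and get_attr_dict in functions.py.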
layernames = [key for key in fields_values_properties]
# values to be copied:
-fields_values_properties['sidewalks']['tactile_paving'] = fields_values_properties['kerbs']['tactile_paving']
+fields_values_properties["sidewalks"]["tactile_paving"] = fields_values_properties[
+ "kerbs"
+]["tactile_paving"]
-fields_values_properties['crossings']['surface'] = fields_values_properties['sidewalks']['surface']
+fields_values_properties["crossings"]["surface"] = fields_values_properties[
+ "sidewalks"
+]["surface"]
-fields_values_properties['crossings']['smoothness'] = fields_values_properties['sidewalks']['smoothness']
+fields_values_properties["crossings"]["smoothness"] = fields_values_properties[
+ "sidewalks"
+]["smoothness"]
# required_fields:
req_fields = {
- 'sidewalks':['surface','smoothness','width','incline','tactile_paving','incline:across','osm_id','last_update'],
- 'kerbs':['kerb','tactile_paving','osm_id','last_update'],
- 'crossings':['crossing','surface','smoothness','traffic_calming','osm_id','last_update'],
+ "sidewalks": [
+ "surface",
+ "smoothness",
+ "width",
+ "incline",
+ "tactile_paving",
+ "incline:across",
+ "osm_id",
+ "last_update",
+ ],
+ "kerbs": ["kerb", "tactile_paving", "osm_id", "last_update"],
+ "crossings": [
+ "crossing",
+ "surface",
+ "smoothness",
+ "traffic_calming",
+ "osm_id",
+ "last_update",
+ ],
}
# a case of "smoothness=concrete:pĺates" demanded this
-wrong_misspelled_values ={
- 'sidewalks':{
- 'smoothness':{'concrete:plates':'?'},
- 'surface':{'betão':'?','Petit_Pavê':'sett','porcelain tiles':'ceramic:tiles'}
- },
- 'kerbs':{
-
- },
- 'crossings':{
-
- },
- 'other_footways':{
-
+wrong_misspelled_values = {
+ "sidewalks": {
+ "smoothness": {"concrete:plates": "?"},
+ "surface": {
+ "betão": "?",
+ "Petit_Pavê": "sett",
+ "porcelain tiles": "ceramic:tiles",
+ },
},
+ "kerbs": {},
+ "crossings": {},
+ "other_footways": {},
}
geom_type_dict = {
- 'sidewalks':['LineString'],
- 'crossings':['LineString'],
- 'kerbs':['Point'],
- 'other_footways':['LineString','Polygon','MultiPolygon']
+ "sidewalks": ["LineString"],
+ "crossings": ["LineString"],
+ "kerbs": ["Point"],
+ "other_footways": ["LineString", "Polygon", "MultiPolygon"],
}
-all_layers_geom_types = {k:v[0] for k,v in geom_type_dict.items()}
-del all_layers_geom_types['other_footways']
+all_layers_geom_types = {k: v[0] for k, v in geom_type_dict.items()}
+del all_layers_geom_types["other_footways"]
for subcategory in other_footways_geometry_types:
all_layers_geom_types[subcategory] = other_footways_geometry_types[subcategory]
-statistics_basepath = 'statistics'
+statistics_basepath = "statistics"
-# defined here to avoid circular importing problems
-def get_url(relative_url,base_url=node_homepage_url):
- return os.path.join(base_url,relative_url)
+# defined here to avoid circular importing problems
+def get_url(relative_url, base_url=node_homepage_url):
+ return os.path.join(base_url, relative_url)
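+
+
+# Illustrative usage (hypothetical argument): get_url("data/sidewalks.parquet")
+# resolves to node_homepage_url + "data/sidewalks.parquet", i.e. the published
+# URL of that file on the node's GitHub Pages site.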
diff --git a/functions.py b/functions.py
index b5f8ed2..8edbe6a 100644
--- a/functions.py
+++ b/functions.py
@@ -2,7 +2,7 @@
import bs4
from time import sleep, time
import pandas as pd
-from datetime import datetime
+from datetime import datetime
import json, requests
from xml.etree import ElementTree
import geopandas as gpd
@@ -14,48 +14,53 @@
READ/ DUMP STUFF
"""
+
+
def read_json(inputpath):
with open(inputpath) as reader:
data = reader.read()
return json.loads(data)
-
-def dump_json(inputdict,outputpath,indent=4):
- with open(outputpath,'w+',encoding='utf8') as json_handle:
- json.dump(inputdict,json_handle,indent=indent,ensure_ascii=False)
-def file_as_string(inputpath:str):
+
+def dump_json(inputdict, outputpath, indent=4):
+ with open(outputpath, "w+", encoding="utf8") as json_handle:
+ json.dump(inputdict, json_handle, indent=indent, ensure_ascii=False)
+
+
+def file_as_string(inputpath: str):
if os.path.exists(inputpath):
- with open(inputpath,encoding='utf8') as reader:
+ with open(inputpath, encoding="utf8") as reader:
return reader.read()
else:
- raise(FileNotFoundError)
-
-def str_to_file(inputstr:str,outputpath:str,check_path=False):
- if check_path:
- if not os.path.exists(outputpath):
- raise(FileNotFoundError)
+        raise FileNotFoundError(inputpath)
+
+def str_to_file(inputstr: str, outputpath: str, check_path=False):
+ if check_path:
+ if not os.path.exists(outputpath):
+            raise FileNotFoundError(outputpath)
- with open(outputpath,'w+',encoding='utf8') as writer:
+ with open(outputpath, "w+", encoding="utf8") as writer:
writer.write(inputstr)
sleep(0.1)
+
class fileAsStrHandler:
- def __init__(self,inputpath:str):
+ def __init__(self, inputpath: str):
self.path = inputpath
self.content = file_as_string(self.path)
- def simple_replace(self,original_part,new_part=''):
+ def simple_replace(self, original_part, new_part=""):
"""default is empty for just remove the selected content"""
- self.content = self.content.replace(original_part,new_part)
+ self.content = self.content.replace(original_part, new_part)
def rewrite(self):
- str_to_file(self.content,self.path)
-
- def write_to_another_path(self,outputpath):
- str_to_file(self.content,outputpath)
+ str_to_file(self.content, self.path)
+
+ def write_to_another_path(self, outputpath):
+ str_to_file(self.content, outputpath)
"""
@@ -64,30 +69,30 @@ def write_to_another_path(self,outputpath):
"""
+
def formatted_datetime_now():
now = datetime.now()
return now.strftime("%d/%m/%Y %H:%M:%S")
-
-def record_datetime(key,json_path='data/last_updated.json'):
+def record_datetime(key, json_path="data/last_updated.json"):
datadict = read_json(json_path)
datadict[key] = formatted_datetime_now()
- dump_json(datadict,json_path)
+ dump_json(datadict, json_path)
+
+ sleep(0.1)
- sleep(.1)
-def record_to_json(key,obj,json_path):
+def record_to_json(key, obj, json_path):
datadict = read_json(json_path)
datadict[key] = obj
- dump_json(datadict,json_path)
-
+ dump_json(datadict, json_path)
"""
@@ -119,10 +124,12 @@ def record_to_json(key,obj,json_path):
"""
-def gen_updating_infotable_page(outpath='data/data_updating.html',json_path='data/last_updated.json'):
+def gen_updating_infotable_page(
+ outpath="data/data_updating.html", json_path="data/last_updated.json"
+):
- tablepart = ''
+ tablepart = ""
records_dict = read_json(json_path)
@@ -131,7 +138,7 @@ def gen_updating_infotable_page(outpath='data/data_updating.html',json_path='dat
            <tr><td>{key}</td><td>{records_dict[key]}</td></tr>
"""
- page_as_txt = f'''
+ page_as_txt = f"""
@@ -190,17 +197,27 @@ def gen_updating_infotable_page(outpath='data/data_updating.html',json_path='dat
- '''
+ """
# with open(outpath,'w+') as writer:
# writer.write(page_as_txt)
-
- str_to_file(page_as_txt,outpath)
+ str_to_file(page_as_txt, outpath)
-def gen_quality_report_page_and_files(outpath,tabledata,feat_type,category,quality_category,text,occ_type,csvpath,count_page=False):
- pagename_base = f'{quality_category}_{category}'
+def gen_quality_report_page_and_files(
+ outpath,
+ tabledata,
+ feat_type,
+ category,
+ quality_category,
+ text,
+ occ_type,
+ csvpath,
+ count_page=False,
+):
+
+ pagename_base = f"{quality_category}_{category}"
csv_url = f"""
@@ -215,7 +232,6 @@ def gen_quality_report_page_and_files(outpath,tabledata,feat_type,category,quali
            <th>commentary</th>
"""
-
if count_page:
tablepart = f"""
            <th>OSM ID (link)</th>
@@ -225,37 +241,36 @@ def gen_quality_report_page_and_files(outpath,tabledata,feat_type,category,quali
valid_featcount = 0
- with open(csvpath,'w+') as file:
- writer = csv.writer(file,delimiter=',',quotechar='"')
- writer.writerow(['osm_id','key','value','commentary'])
+ with open(csvpath, "w+") as file:
+ writer = csv.writer(file, delimiter=",", quotechar='"')
+ writer.writerow(["osm_id", "key", "value", "commentary"])
+
+ for line in tabledata:
+ try:
+ line_as_str = ""
+ if line:
+ if len(line) > 2:
+ if not pd.isna(line[2]):
+
+ writer.writerow(line)
- for line in tabledata:
- try:
- line_as_str = ''
- if line:
- if len(line)> 2:
- if not pd.isna(line[2]):
+ line[0] = return_weblink_V2(str(line[0]), feat_type)
- writer.writerow(line)
+                            line_as_str += "<tr>"
- line[0] = return_weblink_V2(str(line[0]),feat_type)
-
-                            line_as_str += "<tr>"
-
- for element in line:
-                                line_as_str += f"<td>{str(element)}</td>"
-
-                            line_as_str += "</tr>\n"
+ for element in line:
+                                line_as_str += f"<td>{str(element)}</td>"
- tablepart += line_as_str
+                            line_as_str += "</tr>\n"
- valid_featcount += 1
- except:
- if line:
- print('skipped',line)
+ tablepart += line_as_str
+ valid_featcount += 1
+ except:
+ if line:
+ print("skipped", line)
- with open(outpath,'w+') as writer:
+ with open(outpath, "w+") as writer:
page = f"""
@@ -304,40 +319,45 @@ def gen_quality_report_page_and_files(outpath,tabledata,feat_type,category,quali
return valid_featcount
-
def find_map_ref(input_htmlpath):
with open(input_htmlpath) as inf:
txt = inf.read()
- soup = bs4.BeautifulSoup(txt,features='html5lib')
+ soup = bs4.BeautifulSoup(txt, features="html5lib")
- refs = soup.find_all(attrs={'class':"folium-map"})
+ refs = soup.find_all(attrs={"class": "folium-map"})
for found_ref in refs:
- return found_ref['id']
-
-
+ return found_ref["id"]
-def find_html_name(input_htmlpath,specific_ref,tag_ref='img',specific_tag='src',identifier='id'):
+def find_html_name(
+ input_htmlpath, specific_ref, tag_ref="img", specific_tag="src", identifier="id"
+):
with open(input_htmlpath) as inf:
txt = inf.read()
- soup = bs4.BeautifulSoup(txt,features='html5lib')
+ soup = bs4.BeautifulSoup(txt, features="html5lib")
refs = soup.find_all(tag_ref)
-
for found_ref in refs:
# if specific_tag in found_ref:
if found_ref[specific_tag] == specific_ref:
return found_ref[identifier]
-
-def style_changer(in_out_htmlpath,element_key,key='style',original='bottom',new='top',append_t=None):
+
+def style_changer(
+ in_out_htmlpath,
+ element_key,
+ key="style",
+ original="bottom",
+ new="top",
+ append_t=None,
+):
with open(in_out_htmlpath) as inf:
txt = inf.read()
- soup = bs4.BeautifulSoup(txt,features='html5lib')
+ soup = bs4.BeautifulSoup(txt, features="html5lib")
style_refs = soup.find_all(key)
@@ -346,7 +366,7 @@ def style_changer(in_out_htmlpath,element_key,key='style',original='bottom',new=
if element_key in as_txt:
if new:
- new_text = as_txt.replace(original,new)
+ new_text = as_txt.replace(original, new)
else:
new_text = as_txt
@@ -355,54 +375,53 @@ def style_changer(in_out_htmlpath,element_key,key='style',original='bottom',new=
break
-
- with open(in_out_htmlpath,'w+', encoding='utf-8') as writer:
- writer.write(str(soup).replace(as_txt,new_text))
+ with open(in_out_htmlpath, "w+", encoding="utf-8") as writer:
+ writer.write(str(soup).replace(as_txt, new_text))
sleep(0.2)
-
-def add_to_page_after_first_tag(html_filepath,element_string,tag_or_txt='',count=1):
- '''
- Quick and dirty way to insert some stuff directly on the webpage
+
+def add_to_page_after_first_tag(
+ html_filepath, element_string, tag_or_txt="", count=1
+):
+ """
+ Quick and dirty way to insert some stuff directly on the webpage
Originally intended only for
beware of tags that repeat! the "count" argument is very important!
- '''
-
+ """
with open(html_filepath) as reader:
pag_txt = reader.read()
- replace_text = f'{tag_or_txt} \n{element_string}\n'
+ replace_text = f"{tag_or_txt} \n{element_string}\n"
+
+ with open(html_filepath, "w+") as writer:
+ writer.write(pag_txt.replace(tag_or_txt, replace_text, count))
-
- with open(html_filepath,'w+') as writer:
- writer.write(pag_txt.replace(tag_or_txt,replace_text,count))
+ sleep(0.1)
- sleep(.1)
-def replace_at_html(html_filepath,original_text,new_text,count=1):
- '''
- Quick and dirty way to replace some stuff directly on the webpage
+def replace_at_html(html_filepath, original_text, new_text, count=1):
+ """
+ Quick and dirty way to replace some stuff directly on the webpage
Originally intended only for
beware of tags that repeat! the "count" argument is very important!
- '''
+ """
if os.path.exists(html_filepath):
with open(html_filepath) as reader:
pag_txt = reader.read()
-
- with open(html_filepath,'w+') as writer:
- writer.write(pag_txt.replace(original_text,new_text,count))
+ with open(html_filepath, "w+") as writer:
+ writer.write(pag_txt.replace(original_text, new_text, count))
else:
- raise('Error: file not found!!')
+        raise FileNotFoundError("Error: file not found!!")
- sleep(.1)
+ sleep(0.1)
# def file_to_str(filepath):
@@ -412,13 +431,21 @@ def replace_at_html(html_filepath,original_text,new_text,count=1):
# return pag_txt
-def find_between_strings(string, start, end,return_unique=True,exclusions:list=None,include_linebreaks=False):
+
+def find_between_strings(
+ string,
+ start,
+ end,
+ return_unique=True,
+ exclusions: list = None,
+ include_linebreaks=False,
+):
pattern = f"{start}(.*){end}"
# print(pattern)
if include_linebreaks:
- matches = re.findall(pattern, string,re.DOTALL)
+ matches = re.findall(pattern, string, re.DOTALL)
else:
- matches = re.findall(pattern, string)
+ matches = re.findall(pattern, string)
if return_unique:
matches = list(set(matches))
@@ -430,68 +457,89 @@ def find_between_strings(string, start, end,return_unique=True,exclusions:list=N
# (geo)Pandas stuff:
-def get_score_df(inputdict,category='sidewalks',osm_key='surface',input_field='score_default',output_field_base='score'):
+def get_score_df(
+ inputdict,
+ category="sidewalks",
+ osm_key="surface",
+ input_field="score_default",
+ output_field_base="score",
+):
- output_field_name = f'{category}_{osm_key}_{output_field_base}'
- dict = {osm_key:[],output_field_name:[]}
+ output_field_name = f"{category}_{osm_key}_{output_field_base}"
+ dict = {osm_key: [], output_field_name: []}
for val_key in inputdict[category][osm_key]:
dict[osm_key].append(val_key)
- dict[output_field_name].append(inputdict[category][osm_key][val_key][input_field])
+ dict[output_field_name].append(
+ inputdict[category][osm_key][val_key][input_field]
+ )
- return pd.DataFrame(dict), output_field_name
+ return pd.DataFrame(dict), output_field_name
-def get_attr_dict(inputdict,category='sidewalks',osm_tag='surface',attr='color'):
+def get_attr_dict(inputdict, category="sidewalks", osm_tag="surface", attr="color"):
color_dict = {}
for tag_value in inputdict[category][osm_tag]:
color_dict[tag_value] = inputdict[category][osm_tag][tag_value][attr]
return color_dict
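+
+# Illustrative shapes (default arguments assumed): get_score_df(fields_values_properties)
+# returns a two-column DataFrame ("surface", "sidewalks_surface_score") together with the
+# generated column name, while get_attr_dict(fields_values_properties) returns a plain
+# mapping such as {"asphalt": "#fb8072", ...}.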
+
def return_weblink_way(string_id):
    return f"<a href='https://www.openstreetmap.org/way/{string_id}'>{string_id}</a>"
+
def return_weblink_node(string_id):
    return f"<a href='https://www.openstreetmap.org/node/{string_id}'>{string_id}</a>"
-def return_weblink_V2(string_id,featuretype):
+
+def return_weblink_V2(string_id, featuretype):
    return f"<a href='https://www.openstreetmap.org/{featuretype}/{string_id}'>{string_id}</a>"
+
def return_weblink_V3(type_id_string):
- featuretype,string_id = type_id_string.split('_')
+ featuretype, string_id = type_id_string.split("_")
    return f"<a href='https://www.openstreetmap.org/{featuretype}/{string_id}'>{string_id}</a>"
-'''
+
+"""
HISTORY STUFF
-'''
+"""
+
-def get_feature_history_url(featureid,type='way'):
- return f'https://www.openstreetmap.org/api/0.6/{type}/{featureid}/history'
+def get_feature_history_url(featureid, type="way"):
+ return f"https://www.openstreetmap.org/api/0.6/{type}/{featureid}/history"
-def parse_datetime_str(inputstr,format='ymdhms'):
+
+def parse_datetime_str(inputstr, format="ymdhms"):
format_dict = {
- 'ymdhms' : '%Y-%m-%dT%H:%M:%S',
+ "ymdhms": "%Y-%m-%dT%H:%M:%S",
}
- return datetime.strptime(inputstr,format_dict[format])
+ return datetime.strptime(inputstr, format_dict[format])
-def get_datetime_last_update(featureid,featuretype='way',onlylast=True,return_parsed=True,return_special_tuple=True):
+def get_datetime_last_update(
+ featureid,
+ featuretype="way",
+ onlylast=True,
+ return_parsed=True,
+ return_special_tuple=True,
+):
- h_url = get_feature_history_url(featureid,featuretype)
+ h_url = get_feature_history_url(featureid, featuretype)
try:
response = requests.get(h_url)
except:
if onlylast:
if return_parsed and return_special_tuple:
- return [None]*4 #4 Nones
+ return [None] * 4 # 4 Nones
- return ''
+ return ""
else:
return []
@@ -501,20 +549,19 @@ def get_datetime_last_update(featureid,featuretype='way',onlylast=True,return_pa
element_list = tree.findall(featuretype)
if element_list:
- date_rec = [element.attrib['timestamp'][:-1] for element in element_list]
+ date_rec = [element.attrib["timestamp"][:-1] for element in element_list]
if onlylast:
if return_parsed:
if return_special_tuple:
# parsed = datetime.strptime(date_rec[-1],'%Y-%m-%dT%H:%M:%S')
parsed = parse_datetime_str(date_rec[-1])
- return len(date_rec),parsed.day,parsed.month,parsed.year
+ return len(date_rec), parsed.day, parsed.month, parsed.year
else:
# return datetime.strptime(date_rec[-1],'%Y-%m-%dT%H:%M:%S')
return parse_datetime_str(date_rec[-1])
-
else:
return date_rec[-1]
@@ -525,42 +572,49 @@ def get_datetime_last_update(featureid,featuretype='way',onlylast=True,return_pa
else:
return date_rec
-
else:
if onlylast:
- return ''
+ return ""
else:
return []
-
+
else:
- print('bad request, check feature id/type')
+ print("bad request, check feature id/type")
if onlylast:
- return ''
+ return ""
else:
return []
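+
+# Illustrative return shape (default arguments, reachable OSM API assumed):
+#   get_datetime_last_update(some_way_id) -> (version_count, day, month, year)
+# i.e. the number of recorded versions plus the date of the latest edit.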
def get_datetime_last_update_node(featureid):
# all default options
- return get_datetime_last_update(featureid,featuretype='node')
+ return get_datetime_last_update(featureid, featuretype="node")
-def print_relevant_columnamesV2(input_df,not_include=('score','geometry','type','id'),outfilepath=None):
+def print_relevant_columnamesV2(
+ input_df, not_include=("score", "geometry", "type", "id"), outfilepath=None
+):
- as_list = [column for column in input_df.columns if not any(word in column for word in not_include)]
+ as_list = [
+ column
+ for column in input_df.columns
+ if not any(word in column for word in not_include)
+ ]
# print(*as_list)
if outfilepath:
- with open(outfilepath,'w+') as writer:
- writer.write(','.join(as_list))
+ with open(outfilepath, "w+") as writer:
+ writer.write(",".join(as_list))
return as_list
-def check_if_wikipage_exists(name,category="Key:",wiki_page='https://wiki.openstreetmap.org/wiki/'):
+def check_if_wikipage_exists(
+ name, category="Key:", wiki_page="https://wiki.openstreetmap.org/wiki/"
+):
- url = f'{wiki_page}{category}{name}'
+ url = f"{wiki_page}{category}{name}"
while True:
try:
@@ -571,58 +625,71 @@ def check_if_wikipage_exists(name,category="Key:",wiki_page='https://wiki.openst
return status == 200
+
"""
geopandas
"""
-def gdf_to_js_file(input_gdf,output_path,output_varname):
+
+def gdf_to_js_file(input_gdf, output_path, output_varname):
"""
- this function converts a geopandas dataframe to a javascript file, was the only thing that worked for vectorGrid module
+    this function converts a geopandas dataframe to a javascript file; it was the only approach that worked with the vectorGrid module
- returns the importing to be included in the html file
+ returns the importing to be included in the html file
"""
input_gdf.to_file(output_path)
- as_str = f"{output_varname} = "+ file_as_string(output_path)
+ as_str = f"{output_varname} = " + file_as_string(output_path)
- str_to_file(as_str,output_path)
+ str_to_file(as_str, output_path)
    return f'<script type="text/javascript" src="{output_path}"></script>'
-def create_length_field(input_gdf,fieldname='length(km)',in_km=True):
+
+def create_length_field(input_gdf, fieldname="length(km)", in_km=True):
factor = 1
if in_km:
factor = 1000
utm_crs = input_gdf.estimate_utm_crs()
- input_gdf['length(km)'] = input_gdf.to_crs(utm_crs).length/factor
+    input_gdf[fieldname] = input_gdf.to_crs(utm_crs).length / factor
+
-def create_weblink_field(input_gdf,featuretype='LineString',inputfield='id',fieldname='weblink'):
- if featuretype == 'LineString':
- input_gdf[fieldname] = input_gdf[inputfield].astype('string').apply(return_weblink_way)
- if featuretype == 'Point':
- input_gdf[fieldname] = input_gdf[inputfield].astype('string').apply(return_weblink_node)
+def create_weblink_field(
+ input_gdf, featuretype="LineString", inputfield="id", fieldname="weblink"
+):
+ if featuretype == "LineString":
+ input_gdf[fieldname] = (
+ input_gdf[inputfield].astype("string").apply(return_weblink_way)
+ )
+ if featuretype == "Point":
+ input_gdf[fieldname] = (
+ input_gdf[inputfield].astype("string").apply(return_weblink_node)
+ )
def create_folder_if_not_exists(folderpath):
if not os.path.exists(folderpath):
os.makedirs(folderpath)
+
def create_folderlist(folderlist):
for folder in folderlist:
create_folder_if_not_exists(folder)
+
def remove_if_exists(pathfile):
if os.path.exists(pathfile):
os.remove(pathfile)
+
def listdir_fullpath(path):
return [os.path.join(path, file) for file in os.listdir(path)]
-def get_territory_polygon(place_name,outpath=None,outpath_metadata=None):
+def get_territory_polygon(place_name, outpath=None, outpath_metadata=None):
"""
This function takes a place name as input and retrieves the corresponding territory polygon using the Nominatim API. It can also optionally save the polygon as a GeoJSON file.
@@ -641,37 +708,40 @@ def get_territory_polygon(place_name,outpath=None,outpath_metadata=None):
# Parse the response as a JSON object
data = response.json()
-
# sort data by "importance", that is a key in each dictionary of the list:
data.sort(key=lambda x: x["importance"], reverse=True)
# Get the polygon of the territory as a GeoJSON object
- polygon = data[0]['geojson']
+ polygon = data[0]["geojson"]
if outpath:
dump_json(polygon, outpath)
if outpath_metadata:
- if 'geojson' in data[0]:
- del data[0]['geojson']
+ if "geojson" in data[0]:
+ del data[0]["geojson"]
dump_json(data[0], outpath_metadata)
# Return the polygon
return polygon
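+
+# Illustrative usage (hypothetical place name): get_territory_polygon("Curitiba")
+# returns the GeoJSON geometry of the best Nominatim match (highest "importance"),
+# optionally dumping the polygon and the remaining metadata to the given paths.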
+
def geodataframe_from_a_geometry(geometry):
return gpd.GeoDataFrame(geometry=[geometry])
-def bbox_geodataframe(bbox,resort=True):
+
+def bbox_geodataframe(bbox, resort=True):
if resort:
bbox = resort_bbox(bbox)
return gpd.GeoDataFrame(geometry=[box(*bbox)])
+
def resort_bbox(bbox):
- return [bbox[1],bbox[0],bbox[3],bbox[2]]
+ return [bbox[1], bbox[0], bbox[3], bbox[2]]
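+
+# Note (derived from bbox_geodataframe above): resort_bbox swaps each lat/lon pair,
+# e.g. a (south, west, north, east) bbox becomes the (west, south, east, north) =
+# (minx, miny, maxx, maxy) order that shapely's box() expects.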
+
def merge_list_of_dictionaries(list_of_dicts):
merged_dict = {}
@@ -686,29 +756,34 @@ def merge_list_of_dictionaries(list_of_dicts):
else:
merged_dict[key].append(value)
else:
- merged_dict[key] = value if not isinstance(value, list) else value.copy()
+ merged_dict[key] = (
+ value if not isinstance(value, list) else value.copy()
+ )
return merged_dict
+
def join_to_node_homepage(input_list_or_str):
- if isinstance(input_list_or_str,list):
- return os.path.join(node_homepage_url,*input_list_or_str)
+ if isinstance(input_list_or_str, list):
+ return os.path.join(node_homepage_url, *input_list_or_str)
else:
- return os.path.join(node_homepage_url,input_list_or_str)
+ return os.path.join(node_homepage_url, input_list_or_str)
+
def save_geoparquet(input_gdf, outpath):
"""
- Saves a GeoDataFrame to a Parquet file.
+ Saves a GeoDataFrame to a Parquet file.
If the GeoDataFrame is empty, creates an empty Parquet file.
Workaround for: https://github.com/geopandas/geopandas/issues/3137
"""
if input_gdf.empty:
- gpd.GeoDataFrame(columns=['geometry']).to_parquet(outpath)
+ gpd.GeoDataFrame(columns=["geometry"]).to_parquet(outpath)
else:
input_gdf.to_parquet(outpath)
-def row_query(df, querydict, mode='any',reverse=False):
+
+def row_query(df, querydict, mode="any", reverse=False):
"""
Apply a query to each row in a DataFrame and return a boolean result.
@@ -729,37 +804,60 @@ def row_query(df, querydict, mode='any',reverse=False):
2 False
dtype: bool
"""
- if mode == 'any':
- selection = df.isin(querydict).any(axis=1)
- elif mode == 'all':
- selection = df.isin(querydict).all(axis=1)
-
+ if mode == "any":
+ selection = df.isin(querydict).any(axis=1)
+ elif mode == "all":
+ selection = df.isin(querydict).all(axis=1)
+
if reverse:
return ~selection
else:
return selection
-
+
+
def get_gdfs_dict(raw_data=False):
# used dict: paths_dict
- category_group = 'data_raw' if raw_data else 'data'
+ category_group = "data_raw" if raw_data else "data"
+
+ return {
+ category: gpd.read_parquet(paths_dict[category_group][category])
+ for category in paths_dict[category_group]
+ }
- return {category: gpd.read_parquet(paths_dict[category_group][category]) for category in paths_dict[category_group]}
-def get_gdfs_dict_v2(category='data'):
+def get_gdfs_dict_v2(category="data"):
"""
available categories: 'data', 'data_raw','other_footways_subcategories', 'map_layers'
"""
- return {category: gpd.read_parquet(paths_dict[category][category]) for category in paths_dict[category]}
+ return {
+        layername: gpd.read_parquet(paths_dict[category][layername])
+        for layername in paths_dict[category]
+ }
+
-def remove_empty_columns(gdf,report=False):
+def remove_empty_columns(gdf, report=False):
if report:
prev = len(gdf.columns)
- gdf.dropna(axis='columns',how='all',inplace=True)
+ gdf.dropna(axis="columns", how="all", inplace=True)
if report:
- print(f' removed {prev-len(gdf.columns)} empty columns')
-
+ print(f" removed {prev-len(gdf.columns)} empty columns")
+
+
def get_boundaries_bbox():
- return list(gpd.read_file(boundaries_geojson_path).total_bounds)
\ No newline at end of file
+ return list(gpd.read_file(boundaries_geojson_path).total_bounds)
+
+
+def rename_dict_key(
+ dictionary, old_key, new_key, ignore_missing=True, ignore_existing=True
+):
+    if old_key not in dictionary and not ignore_missing:
+ raise KeyError(f"Key {old_key} not found in dictionary")
+
+ if new_key in dictionary and not ignore_existing:
+ raise KeyError(f"Key {new_key} already exists in dictionary")
+
+ if old_key in dictionary:
+ dictionary[new_key] = dictionary.pop(old_key)
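+
+
+# Illustrative usage (as in standalone_legend.py below): rename_dict_key(kwargs, "width",
+# "linewidth") silently renames the key when present, letting callers pass a generic
+# "width" option to matplotlib elements that actually expect "linewidth" or "markersize".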
diff --git a/webmap/create_webmap_new.py b/webmap/create_webmap_new.py
index 0a02ffe..e87b39c 100644
--- a/webmap/create_webmap_new.py
+++ b/webmap/create_webmap_new.py
@@ -3,7 +3,7 @@
# create a --development flag:
parser = argparse.ArgumentParser()
-parser.add_argument('--development', action='store_true')
+parser.add_argument("--development", action="store_true")
args = parser.parse_args()
in_dev = args.development
@@ -12,16 +12,16 @@
params = read_json(webmap_params_original_path)
# then override and fill in with the stuff:
-params['data_layers'] = MAP_DATA_LAYERS
+params["data_layers"] = MAP_DATA_LAYERS
# the layers that by type:
-params['layer_types'] = layer_type_groups
+params["layer_types"] = layer_type_groups
# boundaries:
-params['bounds'] = get_boundaries_bbox()
+params["bounds"] = get_boundaries_bbox()
# updating the node's url:
-params['node_url'] = node_homepage_url
+params["node_url"] = node_homepage_url
# # generating the "sources" and layernames:
params.update(get_sources(only_urls=True))
@@ -30,36 +30,40 @@
# very temporary:
# params['sources'] = MAP_SOURCES
-params['styles'] = {
- "footway_categories" : create_base_style(),
- 'crossings_and_kerbs' : create_crossings_kerbs_style()
+params["styles"] = {
+ "footway_categories": create_base_style(),
+ "crossings_and_kerbs": create_crossings_kerbs_style(),
}
interest_attributes = {
# key is raw attribute name, value is label (human readable)
- "surface" : "Surface",
- "smoothness" : "Smoothness",
- "tactile_paving" : "Tactile Paving",
- 'lit' : "Lighting",
- 'traffic_calming' : "Traffic Calming",
- "wheelchair" : 'wheelchair=* tag',
+ "surface": "Surface",
+ "smoothness": "Smoothness",
+ "tactile_paving": "Tactile Paving",
+ "lit": "Lighting",
+ "traffic_calming": "Traffic Calming",
+ "wheelchair": "wheelchair=* tag",
}
attribute_layers = {
# default is "sidewalks", only specified if different:
- 'traffic_calming' : 'crossings',
+ "traffic_calming": "crossings",
}
different_else_color = {
# default is "gray", specifyed ony if different:
- 'traffic_calming' : '#63636366',
+ "traffic_calming": "#63636366",
}
for attribute in interest_attributes:
- color_dict = get_color_dict(attribute,attribute_layers.get(attribute,'sidewalks'))
- color_schema = create_maplibre_color_schema(color_dict,attribute,different_else_color.get(attribute,'gray'))
-
- params['styles'][attribute] = create_simple_map_style(interest_attributes[attribute],color_schema,color_dict,attribute)
+ color_dict = get_color_dict(attribute, attribute_layers.get(attribute, "sidewalks"))
+ color_schema = create_maplibre_color_schema(
+ color_dict, attribute, different_else_color.get(attribute, "gray")
+ )
+
+ params["styles"][attribute] = create_simple_map_style(
+ interest_attributes[attribute], color_schema, color_dict, attribute
+ )
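+
+# Note (assumption about the helpers above): color_schema is presumably a MapLibre
+# data expression mapping each tag value in color_dict to its color, with
+# different_else_color (default "gray") as the fallback for any other value.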
# reading the base html
webmap_html = file_as_string(webmap_base_path)
@@ -67,9 +71,9 @@
# doing other stuff like insertions and nasty things (TODO):
# finally generate the files:
-str_to_file(webmap_html,webmap_path)
-dump_json(params,webmap_params_path)
+str_to_file(webmap_html, webmap_path)
+dump_json(params, webmap_params_path)
# if we are in dev mode, also dump the original params:
if in_dev:
- dump_json(params,webmap_params_original_path)
\ No newline at end of file
+ dump_json(params, webmap_params_original_path)
diff --git a/webmap/standalone_legend.py b/webmap/standalone_legend.py
index 7c46d51..d57520b 100644
--- a/webmap/standalone_legend.py
+++ b/webmap/standalone_legend.py
@@ -3,6 +3,9 @@
from matplotlib.lines import Line2D
from matplotlib.patches import Patch
+from functions import *
+
+
# thx ChatGPT, employed to transform a scratch code into the class
class StandaloneLegend:
def __init__(self):
@@ -11,7 +14,7 @@ def __init__(self):
self.legend_labels = []
self.legendFig = plt.figure("Legend plot")
- def add_line(self, label='Line', **kwargs):
+ def add_line(self, label="Line", **kwargs):
"""
Add a line to the legend with customizable parameters.
@@ -19,11 +22,14 @@ def add_line(self, label='Line', **kwargs):
label (str): The label for the line element.
**kwargs: Additional keyword arguments for Line2D.
"""
+
+ rename_dict_key(kwargs, "width", "linewidth")
+
line = Line2D([0], [0], label=label, **kwargs)
self.legend_elements.append(line)
self.legend_labels.append(label)
- def add_marker(self, marker='o', label='Marker', **kwargs):
+ def add_marker(self, marker="o", label="Marker", **kwargs):
"""
Add a marker to the legend with customizable parameters.
@@ -32,13 +38,19 @@ def add_marker(self, marker='o', label='Marker', **kwargs):
label (str): The label for the marker element.
**kwargs: Additional keyword arguments for Line2D.
"""
+
+ # to standardize the key names
+ rename_dict_key(kwargs, "color", "markerfacecolor")
+ rename_dict_key(kwargs, "width", "markersize")
+
# Add default transparent color to kwargs if not provided
- kwargs.setdefault('color', (0.0, 0.0, 0.0, 0.0))
+ kwargs.setdefault("color", (0.0, 0.0, 0.0, 0.0))
+ kwargs.setdefault("markeredgecolor", (0.0, 0.0, 0.0, 0.0))
marker = Line2D([0], [0], marker=marker, label=label, **kwargs)
self.legend_elements.append(marker)
self.legend_labels.append(label)
- def add_patch(self, facecolor='orange', edgecolor='w', label='Patch', **kwargs):
+ def add_patch(self, facecolor="orange", edgecolor="w", label="Patch", **kwargs):
"""
Add a patch to the legend with customizable parameters.
@@ -48,6 +60,12 @@ def add_patch(self, facecolor='orange', edgecolor='w', label='Patch', **kwargs):
label (str): The label for the patch element.
**kwargs: Additional keyword arguments for Patch.
"""
+
+ # rename_dict_key(kwargs, "facecolor", "markerfacecolor")
+
+ if "width" in kwargs:
+ kwargs.pop("width")
+
patch = Patch(facecolor=facecolor, edgecolor=edgecolor, label=label, **kwargs)
self.legend_elements.append(patch)
self.legend_labels.append(label)
@@ -61,15 +79,17 @@ def add_element(self, element_type, label, **kwargs):
label (str): The label for the element.
**kwargs: Additional keyword arguments for the element.
"""
- if element_type == 'line':
+ if element_type == "line":
self.add_line(label=label, **kwargs)
- elif element_type == 'marker' or element_type == 'circle':
+ elif element_type == "marker" or element_type == "circle":
self.add_marker(label=label, **kwargs)
- elif element_type == 'patch' or element_type == 'fill':
+ elif element_type == "patch" or element_type == "fill":
self.add_patch(label=label, **kwargs)
else:
- raise ValueError(f"Unknown element type: {element_type}. Supported types are 'line', 'marker'/'circle', and 'patch'/'fill'.")
-
+ raise ValueError(
+ f"Unknown element type: {element_type}. Supported types are 'line', 'marker'/'circle', and 'patch'/'fill'."
+ )
+
def add_elements(self, elements):
"""
Add multiple custom elements to the legend.
@@ -84,12 +104,15 @@ def __hash__(self) -> int:
# enable hashing of the object, example: legend = StandaloneLegend(); hash(legend)
        return hash((tuple(self.legend_elements), tuple(self.legend_labels)))
- def export(self, filename='legend.png'):
+ def export(self, filename="legend.png"):
# Export the legend to an image file
- self.legendFig.legend(handles=self.legend_elements, labels=self.legend_labels, loc='center')
- self.legendFig.savefig(filename, bbox_inches='tight', transparent=True)
+ self.legendFig.legend(
+ handles=self.legend_elements, labels=self.legend_labels, loc="center"
+ )
+ self.legendFig.savefig(filename, bbox_inches="tight", transparent=True)
plt.close(self.legendFig) # Close the figure to free memory
+
# # Example usage:
# legend = StandaloneLegend()
# legend.add_line(color='b', linewidth=4, label='Line')
diff --git a/webmap/webmap_lib.py b/webmap/webmap_lib.py
index b95514b..4cdef60 100644
--- a/webmap/webmap_lib.py
+++ b/webmap/webmap_lib.py
@@ -1,87 +1,82 @@
import sys
-sys.path.append('oswm_codebase')
-from functions import *
-from copy import deepcopy
+sys.path.append("oswm_codebase")
+from copy import deepcopy
from standalone_legend import *
-MAP_DATA_LAYERS = [l for l in paths_dict['map_layers']]
+
+MAP_DATA_LAYERS = [l for l in paths_dict["map_layers"]]
# webmap stuff:
-BASEMAP_URL = 'https://a.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}.png'
-webmap_params_original_path = 'oswm_codebase/webmap/webmap_params.json'
-webmap_params_path = 'webmap_params.json'
-webmap_base_path = 'oswm_codebase/webmap/webmap_base.html'
-webmap_path = 'map.html'
+BASEMAP_URL = "https://a.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}.png"
+webmap_params_original_path = "oswm_codebase/webmap/webmap_params.json"
+webmap_params_path = "webmap_params.json"
+webmap_base_path = "oswm_codebase/webmap/webmap_base.html"
+webmap_path = "map.html"
-assets_path = 'oswm_codebase/assets/'
-map_symbols_assets_path = os.path.join(assets_path, 'map_symbols')
+assets_path = "oswm_codebase/assets/"
+map_symbols_assets_path = os.path.join(assets_path, "map_symbols")
# mapping geometry types to maplibre style
map_geom_type_mapping = {
- 'Polygon':'fill',
- 'LineString':'line',
- 'Point':'circle',
- 'MultiPolygon':'fill',
- 'MultiLineString':'line',
- 'MultiPoint':'circle'
- }
+ "Polygon": "fill",
+ "LineString": "line",
+ "Point": "circle",
+ "MultiPolygon": "fill",
+ "MultiLineString": "line",
+ "MultiPoint": "circle",
+}
# types for each layer:
-layertypes_dict = { k: map_geom_type_mapping[v] for k,v in all_layers_geom_types.items() }
+layertypes_dict = {
+ k: map_geom_type_mapping[v] for k, v in all_layers_geom_types.items()
+}
# the layers by type:
-line_layers = [l for l in MAP_DATA_LAYERS if layertypes_dict[l] == 'line']
-fill_layers = [l for l in MAP_DATA_LAYERS if layertypes_dict[l] == 'fill']
-circle_layers = [l for l in MAP_DATA_LAYERS if layertypes_dict[l] == 'circle']
+line_layers = [l for l in MAP_DATA_LAYERS if layertypes_dict[l] == "line"]
+fill_layers = [l for l in MAP_DATA_LAYERS if layertypes_dict[l] == "fill"]
+circle_layers = [l for l in MAP_DATA_LAYERS if layertypes_dict[l] == "circle"]
layer_type_groups = {
# the order in this dict determines the order in the webmap:
- 'fill':fill_layers,
- 'line':line_layers,
- 'circle':circle_layers
+ "fill": fill_layers,
+ "line": line_layers,
+ "circle": circle_layers,
}
# immutable layers, among different styles:
-immutable_layers= [{
- "id": "osm-baselayer",
- "source": "osm",
- "type": "raster",
- "paint": {
- "raster-opacity": .9
- }
- },
- {
- "id": "boundaries",
- "type": "line",
- "source": "boundaries",
- "paint": {
- "line-color": "white",
- "line-opacity": 0.4
- }
- }]
+immutable_layers = [
+ {
+ "id": "osm-baselayer",
+ "source": "osm",
+ "type": "raster",
+ "paint": {"raster-opacity": 0.9},
+ },
+ {
+ "id": "boundaries",
+ "type": "line",
+ "source": "boundaries",
+ "paint": {"line-color": "white", "line-opacity": 0.4},
+ },
+]
# base dict for a map style:
-mapstyle_basedict = {
- "version": 8,
- "sources": {},
- "layers": []
-}
+mapstyle_basedict = {"version": 8, "sources": {}, "layers": []}
# base_dicts for each layer type:
- # "id": "pedestrian_areas",
- # "source": "oswm_pmtiles_pedestrian_areas",
- # "source-layer": "pedestrian_areas",
- # "type": "fill",
- # "paint": {
- # "fill-color": "gray",
- # "fill-opacity": 0.5
- # }
+# "id": "pedestrian_areas",
+# "source": "oswm_pmtiles_pedestrian_areas",
+# "source-layer": "pedestrian_areas",
+# "type": "fill",
+# "paint": {
+# "fill-color": "gray",
+# "fill-opacity": 0.5
+# }
layertypes_basedict = {
- 'line':{
+ "line": {
"id": "",
"source": "",
"source-layer": "",
@@ -89,15 +84,14 @@
"paint": {
"line-color": "steelblue",
"line-width": [
- 'case',
- ['boolean', ['feature-state', 'hover'], False],
- 6,
- 3
- ]
-
- }
+ "case",
+ ["boolean", ["feature-state", "hover"], False],
+ 6,
+ 3,
+ ],
+ },
},
- 'fill':{
+ "fill": {
"id": "",
"source": "",
"source-layer": "",
@@ -105,14 +99,14 @@
"paint": {
"fill-color": "steelblue",
"fill-opacity": [
- 'case',
- ['boolean', ['feature-state', 'hover'], False],
- 0.8,
- 0.5
- ]
- }
+ "case",
+ ["boolean", ["feature-state", "hover"], False],
+ 0.8,
+ 0.5,
+ ],
+ },
},
- 'circle':{
+ "circle": {
"id": "",
"source": "",
"source-layer": "",
@@ -122,257 +116,294 @@
"circle-color": "steelblue",
"circle-opacity": 0.8,
"circle-radius": [
- 'case',
- ['boolean', ['feature-state', 'hover'], False],
- 7,
- 4
- ]
- }
- }
+ "case",
+ ["boolean", ["feature-state", "hover"], False],
+ 7,
+ 4,
+ ],
+ },
+ },
}
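# (For context: the ["case", ["boolean", ["feature-state", "hover"], False], A, B]
# expressions above are MapLibre data expressions -- they yield A while a feature's
# "hover" feature-state is true and B otherwise, so hovered features render a bit
# wider (lines), more opaque (fills) or larger (circles).)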
-color_attribute = {
- 'fill':'fill-color',
- 'line':'line-color',
- 'circle':'circle-color'
-}
+color_attribute = {"fill": "fill-color", "line": "line-color", "circle": "circle-color"}
+
-def get_sources(terrain_url=None,only_urls=False):
+def get_sources(terrain_url=None, only_urls=False):
ret = {}
- ret['sources'] = {}
-
- for layername in paths_dict['map_layers']:
- ret[f'{layername}_url'] = f'{node_homepage_url}data/tiles/{layername}.pmtiles'
-
- ret['sources'][f'oswm_pmtiles_{layername}'] = {
+ ret["sources"] = {}
+
+ for layername in paths_dict["map_layers"]:
+ ret[f"{layername}_url"] = f"{node_homepage_url}data/tiles/{layername}.pmtiles"
+
+ ret["sources"][f"oswm_pmtiles_{layername}"] = {
"type": "vector",
"url": f"pmtiles://{ret[f'{layername}_url']}",
- "promoteId":"id",
- "attribution": r'© OpenStreetMap Contributors'}
-
- ret['boundaries_url'] = f'{node_homepage_url}data/boundaries.geojson'
+ "promoteId": "id",
+ "attribution": r'© OpenStreetMap Contributors',
+ }
+ ret["boundaries_url"] = f"{node_homepage_url}data/boundaries.geojson"
# basemap:
- ret['sources']['osm'] = {
+ ret["sources"]["osm"] = {
"type": "raster",
"tiles": [BASEMAP_URL],
- "attribution": r'© OpenStreetMap Contributors; basemap by CARTO'
+ "attribution": r'© OpenStreetMap Contributors; basemap by CARTO',
}
# boundaries:
- ret['sources']['boundaries'] = {
+ ret["sources"]["boundaries"] = {
"type": "geojson",
- "data": ret['boundaries_url'],
- "attribution": r'© OpenStreetMap Contributors'
+ "data": ret["boundaries_url"],
+ "attribution": r'© OpenStreetMap Contributors',
}
-
+
if terrain_url:
# # # terrain:
- ret['sources']['terrain'] = {
+ ret["sources"]["terrain"] = {
"type": "raster-dem",
"url": terrain_url,
- "tileSize": 256
+ "tileSize": 256,
}
-
+
if only_urls:
- del ret['sources']
+ del ret["sources"]
return ret
else:
return ret
-
-MAP_SOURCES = get_sources()['sources']
-
+
+
+MAP_SOURCES = get_sources()["sources"]
+
+
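# Illustrative sketch of what get_sources() returns (hypothetical layer name and
# terrain URL; a minimal example, not an exhaustive listing):
#
#     urls = get_sources(only_urls=True)
#     # e.g. urls["sidewalks_url"] == f"{node_homepage_url}data/tiles/sidewalks.pmtiles"
#     # and  urls["boundaries_url"] == f"{node_homepage_url}data/boundaries.geojson"
#
#     full = get_sources(terrain_url="https://example.com/terrain-tiles.json")
#     # full["sources"]["terrain"] is a raster-dem source with tileSize 256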
def sort_keys_by_order(input_dict, order_list):
ordered_keys = []
remaining_keys = list(input_dict.keys())
-
+
for order in order_list:
for key in list(remaining_keys):
if input_dict[key] == order:
ordered_keys.append(key)
remaining_keys.remove(key)
-
+
ordered_keys.extend(remaining_keys)
-
+
return ordered_keys
+
ordered_map_layers = sort_keys_by_order(layertypes_dict, layer_type_groups.keys())
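# A worked example of sort_keys_by_order() (hypothetical layer set): keys are
# returned grouped by the order of their *values* in order_list, so with
# layer_type_groups the fill layers come first, then lines, then circles --
# i.e. polygons are drawn underneath lines and points in the webmap.
#
#     sort_keys_by_order(
#         {"sidewalks": "line", "pedestrian_areas": "fill", "kerbs": "circle"},
#         ["fill", "line", "circle"],
#     )
#     # -> ["pedestrian_areas", "sidewalks", "kerbs"]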
-def create_base_style(sources=MAP_SOURCES,name='Footway Categories'):
+def create_base_style(sources=MAP_SOURCES, name="Footway Categories"):
+
+ default_color = "steelblue"
custom_layer_colors = {
- 'stairways':'#8a7e2f',
- 'main_footways':'#299077',
- 'informal_footways':'#b0645a',
- 'potential_footways':'#9569a4',
+ "stairways": "#8a7e2f",
+ "main_footways": "#299077",
+ "informal_footways": "#b0645a",
+ "potential_footways": "#9569a4",
}
custom_layer_dash_patterns = {
- "crossings": [1,0.5],
+ "crossings": [1, 0.5],
}
-
+
+ custom_legend_widths = {"kerbs": 8}
+
style_dict = deepcopy(mapstyle_basedict)
-
- style_dict['sources'] = sources
-
- style_dict['name'] = name
-
- style_dict['layers'].extend(deepcopy(immutable_layers))
-
+
+ style_dict["sources"] = sources
+
+ style_dict["name"] = name
+
+ style_dict["layers"].extend(deepcopy(immutable_layers))
+
+ # declaring the legend:
+ style_legend = StandaloneLegend()
+
for layername in ordered_map_layers:
layer_type = layertypes_dict[layername]
-
+
layer_dict = deepcopy(layertypes_basedict[layer_type])
-
+
# now we can set the id and source:
- layer_dict['id'] = layername
- layer_dict['source'] = f'oswm_pmtiles_{layername}'
- layer_dict['source-layer'] = layername
+ layer_dict["id"] = layername
+ layer_dict["source"] = f"oswm_pmtiles_{layername}"
+ layer_dict["source-layer"] = layername
if layername in custom_layer_colors:
- layer_dict['paint']['line-color'] = custom_layer_colors[layername]
-
+ layer_dict["paint"]["line-color"] = custom_layer_colors[layername]
+
if layername in custom_layer_dash_patterns:
- layer_dict['paint']['line-dasharray'] = custom_layer_dash_patterns[layername]
+ layer_dict["paint"]["line-dasharray"] = custom_layer_dash_patterns[
+ layername
+ ]
# now the custom colors
-
- style_dict['layers'].append(layer_dict)
-
+ style_dict["layers"].append(layer_dict)
+
+ # adding to the legend:
+ style_legend.add_element(
+ layer_type,
+ layername,
+ **{
+ "color": custom_layer_colors.get(layername, default_color),
+ "width": custom_legend_widths.get(layername, 4),
+ },
+ )
+
+ style_legend.export(
+ os.path.join(map_symbols_assets_path, "footway_categories" + ".png")
+ )
+
return style_dict
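# Sketch of how the returned style dict might be consumed (the output filename is
# hypothetical). Note that calling create_base_style() also exports the matching
# legend image, footway_categories.png, into map_symbols_assets_path.
#
#     import json
#     with open("footway_categories_style.json", "w") as f:
#         json.dump(create_base_style(), f, indent=2)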
-def create_simple_map_style(name,color_schema,color_dict,filename,sources=MAP_SOURCES,generate_shadow_layers=False):
+
+def create_simple_map_style(
+ name,
+ color_schema,
+ color_dict,
+ filename,
+ sources=MAP_SOURCES,
+ generate_shadow_layers=False,
+):
style_dict = deepcopy(mapstyle_basedict)
-
- style_dict['sources'] = sources
-
- style_dict['name'] = name
-
- style_dict['layers'].extend(deepcopy(immutable_layers))
-
+
+ style_dict["sources"] = sources
+
+ style_dict["name"] = name
+
+ style_dict["layers"].extend(deepcopy(immutable_layers))
+
# creating "shadow layers" for line layers only:
if generate_shadow_layers:
for layername in ordered_map_layers:
- if layertypes_dict[layername] == 'line':
- layer_dict = deepcopy(layertypes_basedict['line'])
- layer_dict['id'] = f'{layername}_shadow'
- layer_dict['source'] = f'oswm_pmtiles_{layername}'
- layer_dict['source-layer'] = layername
- layer_dict['paint']['line-color'] = 'black'
- layer_dict['paint']['line-width'] = 4
-
- style_dict['layers'].append(layer_dict)
-
+ if layertypes_dict[layername] == "line":
+ layer_dict = deepcopy(layertypes_basedict["line"])
+ layer_dict["id"] = f"{layername}_shadow"
+ layer_dict["source"] = f"oswm_pmtiles_{layername}"
+ layer_dict["source-layer"] = layername
+ layer_dict["paint"]["line-color"] = "black"
+ layer_dict["paint"]["line-width"] = 4
+
+ style_dict["layers"].append(layer_dict)
+
for layername in ordered_map_layers:
layer_type = layertypes_dict[layername]
-
+
layer_dict = deepcopy(layertypes_basedict[layer_type])
-
+
# now we can set the id and source:
- layer_dict['id'] = layername
- layer_dict['source'] = f'oswm_pmtiles_{layername}'
- layer_dict['source-layer'] = layername
-
+ layer_dict["id"] = layername
+ layer_dict["source"] = f"oswm_pmtiles_{layername}"
+ layer_dict["source-layer"] = layername
+
# layer_type = layertypes_dict[layername]
-
- layer_dict['paint'][color_attribute[layer_type]] = color_schema
-
-
- style_dict['layers'].append(layer_dict)
+
+ layer_dict["paint"][color_attribute[layer_type]] = color_schema
+
+ style_dict["layers"].append(layer_dict)
# now generating the map symbols
# TODO: check the hashing, otherwise no need to re-run
style_legend = StandaloneLegend()
+ custom_line_args = {
+ "linewidth": 4,
+ }
+
for key in color_dict:
-        style_legend.add_line(label=key, color=color_dict[key])
-    style_legend.add_line(label='other', color=color_schema[-1])
-    style_legend.export(os.path.join(map_symbols_assets_path,f'{filename}.png'))
-
+        style_legend.add_line(label=key, color=color_dict[key], **custom_line_args)
+    style_legend.add_line(label="other", color=color_schema[-1], **custom_line_args)
+
+    style_legend.export(os.path.join(map_symbols_assets_path, f"{filename}.png"))
return style_dict
-def get_color_dict(columnname,layer='sidewalks',attribute='color'):
+
+def get_color_dict(columnname, layer="sidewalks", attribute="color"):
"""
Given a columnname, layername and attribute, returns a dictionary mapping each value in the column to its corresponding attribute value.
-
+
:param columnname: column name
:param layer: layer name (default to 'sidewalks')
:param attribute: attribute name (default to 'color')
-
+
:return: a dictionary mapping each value in the column to its corresponding attribute value
"""
colordict = {}
-
+
base_dict = fields_values_properties[layer][columnname]
-
+
for key in base_dict:
colordict[key] = base_dict[key][attribute]
-
+
return colordict
-def create_maplibre_color_schema(attribute_dict,attribute_name, else_color="gray"):
+
+def create_maplibre_color_schema(attribute_dict, attribute_name, else_color="gray"):
schema = ["case"]
for key, value in attribute_dict.items():
- schema.extend([
- ["==", ["get", attribute_name], key],
- value
- ])
+ schema.extend([["==", ["get", attribute_name], key], value])
schema.append(else_color)
return schema
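# Worked example (the colors here are arbitrary): the generated expression is a
# flat MapLibre "case" chain ending in the else_color, which is what
# create_crossings_kerbs_style() below assigns to the layer's color paint property.
#
#     create_maplibre_color_schema({"flush": "#299077", "raised": "#b0645a"}, "kerb")
#     # -> ["case",
#     #     ["==", ["get", "kerb"], "flush"], "#299077",
#     #     ["==", ["get", "kerb"], "raised"], "#b0645a",
#     #     "gray"]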
-def create_crossings_kerbs_style(sources=MAP_SOURCES,name='Crossings and Kerbs',else_color='#63636380'):
+
+def create_crossings_kerbs_style(
+ sources=MAP_SOURCES, name="Crossings and Kerbs", else_color="#63636380"
+):
style_dict = deepcopy(mapstyle_basedict)
-
- style_dict['sources'] = sources
-
- style_dict['name'] = name
-
- style_dict['layers'].extend(deepcopy(immutable_layers))
+
+ style_dict["sources"] = sources
+
+ style_dict["name"] = name
+
+ style_dict["layers"].extend(deepcopy(immutable_layers))
interest_layers = {
# layername : tag key
- 'crossings' : 'crossing',
- 'kerbs' : 'kerb',
+ "crossings": "crossing",
+ "kerbs": "kerb",
}
for layername in ordered_map_layers:
layer_type = layertypes_dict[layername]
-
+
layer_dict = deepcopy(layertypes_basedict[layer_type])
if layername in interest_layers:
# layer_dict.update(custom_crossing_kerbs_dict[layername])
- color_dict = get_color_dict(interest_layers[layername],layername)
- color_schema = create_maplibre_color_schema(color_dict,interest_layers[layername],'gray')
- layer_dict['paint'][color_attribute[layer_type]] = color_schema
+ color_dict = get_color_dict(interest_layers[layername], layername)
+ color_schema = create_maplibre_color_schema(
+ color_dict, interest_layers[layername], "gray"
+ )
+ layer_dict["paint"][color_attribute[layer_type]] = color_schema
# making kerbs a little bigger
- if layername == 'kerbs':
- layer_dict['paint']['circle-radius'] = [
- 'case',
- ['boolean', ['feature-state', 'hover'], False],
+ if layername == "kerbs":
+ layer_dict["paint"]["circle-radius"] = [
+ "case",
+ ["boolean", ["feature-state", "hover"], False],
8,
- 5
+ 5,
]
else:
# all other layers will be a very faded gray:
- layer_dict['paint'][color_attribute[layer_type]] = else_color
+ layer_dict["paint"][color_attribute[layer_type]] = else_color
# now we can set the id and source:
- layer_dict['id'] = layername
- layer_dict['source'] = f'oswm_pmtiles_{layername}'
- layer_dict['source-layer'] = layername
-
- style_dict['layers'].append(layer_dict)
+ layer_dict["id"] = layername
+ layer_dict["source"] = f"oswm_pmtiles_{layername}"
+ layer_dict["source-layer"] = layername
+ style_dict["layers"].append(layer_dict)
return style_dict
+
# call just once:
create_folderlist([map_symbols_assets_path])
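# Hypothetical end-to-end sketch (assumes fields_values_properties defines a
# "surface" column for the "sidewalks" layer): one thematic style built with the
# helpers above; the call also writes the "surface.png" legend into
# map_symbols_assets_path.
#
#     surface_colors = get_color_dict("surface", layer="sidewalks")
#     surface_schema = create_maplibre_color_schema(surface_colors, "surface", "gray")
#     surface_style = create_simple_map_style(
#         name="Surface",
#         color_schema=surface_schema,
#         color_dict=surface_colors,
#         filename="surface",
#     )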