Skip to content

Commit

Permalink
prepare for automatic documentation generation
Browse files Browse the repository at this point in the history
- check source titles
- check source urls
- add extra info for sources which serve multiple
  districts/municipalities
  • Loading branch information
mampfes committed Dec 28, 2022
1 parent 322c090 commit 794e280
Show file tree
Hide file tree
Showing 118 changed files with 781 additions and 486 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,12 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "AbfallPlus"
TITLE = "Abfall.IO / AbfallPlus"
DESCRIPTION = (
"Source for AbfallPlus.de waste collection. Service is hosted on abfall.io."
)
URL = "https://www.abfallplus.de"
COUNTRY = "de"
TEST_CASES = {
"Waldenbuch": {
"key": "8215c62763967916979e0e8566b6172e",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@
from waste_collection_schedule import Collection
from waste_collection_schedule.service.ICS import ICS

TITLE = "Abfall Neunkirchen Siegerland"
TITLE = "Neunkirchen Siegerland"
DESCRIPTION = " Source for 'Abfallkalender Neunkirchen Siegerland'."
URL = "https://www.neunkirchen-siegerland.de"
TEST_CASES = {
"Waldstraße":{ "street":"Waldstr"}
"Waldstraße":{ "strasse":"Waldstr"}
}

_LOGGER = logging.getLogger(__name__)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "Abfall Zollernalbkreis"
TITLE = "Abfallwirtschaft Zollernalbkreis"
DESCRIPTION = "Source for Abfallwirtschaft Zollernalbkreis waste collection."
URL = "https://www.abfallkalender-zak.de"
TEST_CASES = {
Expand Down Expand Up @@ -42,22 +42,23 @@
},
}

ICON_MAP = {
"Restmüll": "mdi:trash-can",
"Grünabfall" : "mdi:leaf",
"Gelber Sack" : "mdi:sack",
"Papiertonne" : "mdi:package-variant",
"Bildschirm-/Kühlgeräte" : "mdi:television-classic",
"Schadstoffsammlung" : "mdi:biohazard",
"altmetalle" : "mdi:nail",
}


class Source:
def __init__(self, city, types, street=None):
self._city = city
self._street = street
self._types = types
self._ics = ICS()
self._iconMap = {
"Restmüll": "mdi:trash-can",
"Grünabfall" : "mdi:leaf",
"Gelber Sack" : "mdi:sack",
"Papiertonne" : "mdi:package-variant",
"Bildschirm-/Kühlgeräte" : "mdi:television-classic",
"Schadstoffsammlung" : "mdi:biohazard",
"altmetalle" : "mdi:nail",
}

def fetch(self):
now = datetime.now()
Expand Down Expand Up @@ -95,6 +96,6 @@ def fetch_year(self, year, city, street, types):
waste_type = d[1]
next_pickup_date = d[0]

entries.append(Collection(date=next_pickup_date, t=waste_type, icon=self._iconMap.get(waste_type,"mdi:trash-can")))
entries.append(Collection(date=next_pickup_date, t=waste_type, icon=ICON_MAP.get(waste_type,"mdi:trash-can")))

return entries
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.AbfallnaviDe import AbfallnaviDe

TITLE = "AbfallNavi"
TITLE = "AbfallNavi (RegioIT.de)"
DESCRIPTION = (
"Source for AbfallNavi waste collection. AbfallNavi is a brand name of regioit.de."
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "Landkreis Forchheim"
TITLE = "Abfalltermine Forchheim"
DESCRIPTION = "Source for Landkreis Forchheim"
URL = "https://www.abfalltermine-forchheim.de/"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
import requests
from waste_collection_schedule import Collection # type: ignore[attr-defined]

TITLE = "ALW Wolfenbüttel"
TITLE = "Abfallwirtschaft Landkreis Wolfenbüttel"
DESCRIPTION = "Source for ALW Wolfenbüttel."
URL = "https://abfallapp.alw-wf.de"
URL = "https://alw-wf.de"
TEST_CASES = {
"Linden alte Straße": {"ort": "Linden mit Okertalsiedlung", "strasse": "Siedlung"},
"Linden neuere Straße": {
Expand All @@ -17,6 +17,7 @@
"Dettum": {"ort": "Dettum", "strasse": "Egal!"},
}

API_URL = "https://abfallapp.alw-wf.de"
AUTH_DATA = {
"auth": {
"Name": "ALW",
Expand All @@ -41,7 +42,7 @@ def fetch(self):
auth_params = json.dumps(AUTH_DATA)

# ALW WF uses a self-signed certificate so we need to disable certificate verification
r = requests.post(f"{URL}/GetOrte.php", data=auth_params, verify=False)
r = requests.post(f"{API_URL}/GetOrte.php", data=auth_params, verify=False)
orte = r.json()
if orte["result"][0]["StatusCode"] != 200:
raise Exception(f"Error getting Orte: {orte['result'][0]['StatusMsg']}")
Expand All @@ -53,7 +54,7 @@ def fetch(self):
if ort_id is None:
raise Exception(f"Error finding Ort {self._ort}")

r = requests.post(f"{URL}/GetStrassen.php", data=auth_params, verify=False)
r = requests.post(f"{API_URL}/GetStrassen.php", data=auth_params, verify=False)
strassen = r.json()
if strassen["result"][0]["StatusCode"] != 200:
raise Exception(
Expand All @@ -73,7 +74,7 @@ def fetch(self):
if strasse_id is None:
raise Exception(f"Error finding Straße {self._strasse}")

r = requests.post(f"{URL}/GetArten.php", data=auth_params, verify=False)
r = requests.post(f"{API_URL}/GetArten.php", data=auth_params, verify=False)
arten = r.json()
if arten["result"][0]["StatusCode"] != 200:
raise Exception(f"Error getting Arten: {arten['result'][0]['StatusMsg']}")
Expand All @@ -84,7 +85,7 @@ def fetch(self):

entries = []
r = requests.post(
f"{URL}/GetTermine.php/{strasse_id}", data=auth_params, verify=False
f"{API_URL}/GetTermine.php/{strasse_id}", data=auth_params, verify=False
)
termine = r.json()
if termine["result"][0]["StatusCode"] != 200:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "Abfall ART Trier"
TITLE = "ART Trier"
DESCRIPTION = "Source for waste collection of ART Trier."
URL = "https://www.art-trier.de"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
# Include work around for SSL UNSAFE_LEGACY_RENEGOTIATION_DISABLED error
from waste_collection_schedule.service.SSLError import get_legacy_session

TITLE = "Auckland council"
TITLE = "Auckland Council"
DESCRIPTION = "Source for Auckland council."
URL = "https://aucklandcouncil.govt.nz"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "AW Harburg"
TITLE = "Abfallwirtschaft Landkreis Harburg"
DESCRIPTION = "Abfallwirtschaft Landkreis Harburg"
URL = "https://www.landkreis-harburg.de/bauen-umwelt/abfallwirtschaft/abfallkalender/"
URL = "https://www.landkreis-harburg.de"

TEST_CASES = {
"CityWithTwoLevels": {"level_1": "Hanstedt", "level_2": "Evendorf"},
Expand All @@ -16,6 +16,7 @@
},
}

API_URL = "https://www.landkreis-harburg.de/bauen-umwelt/abfallwirtschaft/abfallkalender/"
HEADERS = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64)",
}
Expand All @@ -33,11 +34,11 @@ def fetch(self):
# Get the IDs of the districts on the first level
# Double loading is on purpose because sometimes the webpage has an overlay
# which is gone on the second try in a session
r = session.get(URL, headers=HEADERS)
r = session.get(API_URL, headers=HEADERS)
if "Zur aufgerufenen Seite" in r.text:
r = session.get(URL, headers=HEADERS)
r = session.get(API_URL, headers=HEADERS)
if r.status_code != 200:
raise Exception(f"Error: failed to fetch first url: {URL}")
raise Exception(f"Error: failed to fetch first url: {API_URL}")

# Get the IDs of the districts on the first level
id = self.parse_level(r.text, 1)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,13 @@

TITLE = "AWB Oldenburg"
DESCRIPTION = "Source for 'Abfallwirtschaftsbetrieb Stadt Oldenburg (Oldb)'."
URL = "https://services.oldenburg.de/index.php"
URL = "https://oldenburg.de"
TEST_CASES = {
"Polizeiinspektion Oldenburg": {"street": "Friedhofsweg", "house_number": 30}
}

API_URL = "https://services.oldenburg.de/index.php"


class Source:
def __init__(self, street, house_number):
Expand Down Expand Up @@ -39,7 +41,7 @@ def fetch(self):
args = urllib.parse.urlencode(args, quote_via=urllib.parse.quote)

# post request
r = requests.get(URL, params=args)
r = requests.get(API_URL, params=args)

dates = self._ics.convert(r.text)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import requests
from waste_collection_schedule import Collection # type: ignore[attr-defined]

TITLE = "AWIDO"
TITLE = "AWIDO Online"
DESCRIPTION = "Source for AWIDO waste collection."
URL = "https://www.awido-online.de/"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "AWN"
TITLE = "Abfallwirtschaft Neckar-Odenwald-Kreis"
DESCRIPTION = "Source for AWN (Abfallwirtschaft Neckar-Odenwald-Kreis)."
URL = "https://www.awn-online.de"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "AWR"
TITLE = "Abfallwirtschaft Rendsburg"
DESCRIPTION = "Source for Abfallwirtschaft Rendsburg"
URL = "https://www.awr.de"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "AWSH"
TITLE = "Abfallwirtschaft Südholstein"
DESCRIPTION = "Source for Abfallwirtschaft Südholstein"
URL = "https://www.awsh.de"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

TITLE = 'Banyule City Council'
DESCRIPTION = 'Source for Banyule City Council rubbish collection.'
URL = 'https://www.banyule.vic.gov.au/binday'
URL = 'https://www.banyule.vic.gov.au'
TEST_CASES = {
'Monday A': {'street_address': '6 Mandall Avenue, IVANHOE'},
'Monday A Geolocation ID': {'geolocation_id': '4f7ebfca-1526-4363-8b87-df3103a10a87'},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import requests
from waste_collection_schedule import Collection # type: ignore[attr-defined]

TITLE = "Berline Recycling"
TITLE = "Berlin Recycling"
DESCRIPTION = "Source for Berlin Recycling waste collection."
URL = "https://berlin-recycling.de"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "BMV.at"
TITLE = "Burgenländischer Müllverband"
DESCRIPTION = "Source for BMV, Austria"
URL = "https://www.bmv.at"
TEST_CASES = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from dateutil import parser
from waste_collection_schedule import Collection

TITLE = "bracknell-forest.gov.uk"
TITLE = "Bracknell Forest Council"
DESCRIPTION = "Bracknell Forest Council, UK - Waste Collection"
URL = "https://selfservice.mybfc.bracknell-forest.gov.uk"
TEST_CASES = {
Expand All @@ -13,7 +13,8 @@
"32 Ashbourne": {"house_number": "32", "post_code": "RG12 8SG"},
"1 Acacia Avenue": {"house_number": "1", "post_code": "GU47 0RU"},
}
ICONS = {

ICON_MAP = {
"General Waste": "mdi:trash-can",
"Recycling": "mdi:recycle",
"Garden": "mdi:leaf",
Expand Down Expand Up @@ -68,7 +69,7 @@ def fetch(self):
collection_lookup.raise_for_status()
collections = collection_lookup.json()["response"]["collections"]
entries = []
for waste_type in ICONS.keys():
for waste_type in ICON_MAP.keys():
try:
entries.append(
Collection(
Expand All @@ -78,7 +79,7 @@ def fetch(self):
]["date"]
).date(),
t=waste_type,
icon=ICONS[waste_type],
icon=ICON_MAP[waste_type],
)
)
except (StopIteration, TypeError):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,35 +11,36 @@
import ssl
import urllib3

TITLE = "Bradford.gov.uk"
TITLE = "Bradford Metropolitan District Council"
DESCRIPTION = (
"Source for Bradford.gov.uk services for Bradford Metropolitan Council, UK."
)
URL = "https://onlineforms.bradford.gov.uk/ufs/"
URL = "https://bradford.gov.uk"
TEST_CASES = {
"Ilkley": {"uprn": "100051250665"},
"Bradford": {"uprn": "100051239296"},
"Baildon": {"uprn": "10002329242"},
}

ICONS = {
API_URL = "https://onlineforms.bradford.gov.uk/ufs/"
ICON_MAP = {
"REFUSE": "mdi:trash-can",
"RECYCLING": "mdi:recycle",
"GARDEN": "mdi:leaf",
}

from pprint import pprint

class CustomHttpAdapter (requests.adapters.HTTPAdapter):
'''Transport adapter" that allows us to use custom ssl_context.'''

def __init__(self, ssl_context=None, **kwargs):
self.ssl_context = ssl_context
super().__init__(**kwargs)

def init_poolmanager(self, connections, maxsize, block=False):
self.poolmanager = urllib3.poolmanager.PoolManager(
num_pools=connections, maxsize=maxsize,
class CustomHttpAdapter (requests.adapters.HTTPAdapter):
'''Transport adapter" that allows us to use custom ssl_context.'''

def __init__(self, ssl_context=None, **kwargs):
self.ssl_context = ssl_context
super().__init__(**kwargs)

def init_poolmanager(self, connections, maxsize, block=False):
self.poolmanager = urllib3.poolmanager.PoolManager(
num_pools=connections, maxsize=maxsize,
block=block, ssl_context=self.ssl_context)

class Source:
Expand All @@ -59,7 +60,7 @@ def fetch(self):
s.cookies.set(
"COLLECTIONDATES", self._uprn, domain="onlineforms.bradford.gov.uk"
)
r = s.get(f"{URL}/collectiondates.eb")
r = s.get(f"{API_URL}/collectiondates.eb")

soup = BeautifulSoup(r.text, features="html.parser")
div = soup.find_all("table", {"role": "region"})
Expand Down Expand Up @@ -87,7 +88,7 @@ def fetch(self):
entry.text.strip(), "%a %b %d %Y"
).date(),
t=type,
icon=ICONS[type],
icon=ICON_MAP[type],
)
)
except ValueError:
Expand Down
Loading

0 comments on commit 794e280

Please sign in to comment.