From 8c939a50a613e4e466f8f23f032c87e69a3ca773 Mon Sep 17 00:00:00 2001 From: SeungHyun Hong Date: Fri, 19 Apr 2024 18:51:11 +0900 Subject: [PATCH] =?UTF-8?q?del:=20=EA=B0=81=20=ED=81=AC=EB=A1=A4=EB=9F=AC?= =?UTF-8?q?=EB=B3=84=20=20logging.basicConfig=20=EC=82=AD=EC=A0=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Crawler/base/crawler.py | 2 +- Crawler/cu_crawler.py | 3 ++- Crawler/emart24_crawler.py | 3 ++- Crawler/gs_crawler.py | 5 +++-- Crawler/seven_eleven_crawler.py | 3 ++- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Crawler/base/crawler.py b/Crawler/base/crawler.py index 1a79594..2479fba 100644 --- a/Crawler/base/crawler.py +++ b/Crawler/base/crawler.py @@ -19,7 +19,7 @@ class Crawler(ABC): _is_valid_image(session, image_url): 주어진 이미지 URL이 유효한 이미지인지 확인하는 비동기 유틸리티 메서드입니다. """ - logging.basicConfig(level=logging.INFO) + logging.basicConfig(level=logging.DEBUG) __logger = logging.getLogger(__name__) @property diff --git a/Crawler/cu_crawler.py b/Crawler/cu_crawler.py index 9fa8339..1773437 100644 --- a/Crawler/cu_crawler.py +++ b/Crawler/cu_crawler.py @@ -14,7 +14,6 @@ class CUCrawler(Crawler): - logging.basicConfig(level=logging.INFO) __logger = logging.getLogger(__name__) _base_url = "https://cu.bgfretail.com/event/plusAjax.do" __search_conditions = [23, 24] # 1+1, 2+1 @@ -75,7 +74,9 @@ async def execute(self) -> list[EventItem]: break data_array.extend(event_items) + self.__logger.debug(f"PageNumber Increasing... {page_num}") page_num += 1 + self.__logger.debug(f"CU: {search_condition} Done.") return data_array diff --git a/Crawler/emart24_crawler.py b/Crawler/emart24_crawler.py index 412f3b1..409208a 100644 --- a/Crawler/emart24_crawler.py +++ b/Crawler/emart24_crawler.py @@ -14,7 +14,6 @@ class Emart24Crawler(Crawler): - logging.basicConfig(level=logging.INFO) __logger = logging.getLogger(__name__) _base_url = "https://emart24.co.kr/goods/event" __category_seqs = [1, 2] # 1+1, 2+1 각각 가져오기 @@ -74,7 +73,9 @@ async def execute(self) -> list[EventItem]: break data_array.extend(event_items) + self.__logger.debug(f"PageNumber Increasing... {page_num}") page_num += 1 + self.__logger.debug(f"emart24: {category_seq} Done.") return data_array diff --git a/Crawler/gs_crawler.py b/Crawler/gs_crawler.py index 493c057..f220583 100644 --- a/Crawler/gs_crawler.py +++ b/Crawler/gs_crawler.py @@ -14,7 +14,6 @@ class GSCrawler(Crawler): - logging.basicConfig(level=logging.INFO) __logger = logging.getLogger(__name__) _base_url = "http://gs25.gsretail.com/gscvs/ko/products/event-goods-search" __parameter_lists = ["ONE_TO_ONE", "TWO_TO_ONE"] @@ -67,8 +66,9 @@ async def execute(self): total_pages = json_data["pagination"]["numberOfPages"] if page_num >= total_pages: break - + self.__logger.debug(f"PageNumber Increasing... {page_num}") page_num += 1 + self.__logger.debug(f"GS: {parameter_list} Done.") return data_array @@ -76,6 +76,7 @@ async def execute(self): async def main(): crawler = GSCrawler() items = await crawler.execute() + print(len(items)) if __name__ == "__main__": diff --git a/Crawler/seven_eleven_crawler.py b/Crawler/seven_eleven_crawler.py index 2fca452..e188b52 100644 --- a/Crawler/seven_eleven_crawler.py +++ b/Crawler/seven_eleven_crawler.py @@ -14,7 +14,6 @@ class SevenElevenCrawler(Crawler): - logging.basicConfig(level=logging.INFO) __logger = logging.getLogger(__name__) _base_url = "https://www.7-eleven.co.kr/product/listMoreAjax.asp" __promotion_conditions = [1, 2] # 1+1, 2+1 @@ -87,7 +86,9 @@ async def execute(self) -> list[EventItem]: break data_array.extend(event_items) + self.__logger.debug(f"PageNumber Increasing... {page_num}") page_num += 1 + self.__logger.debug(f"7-Eleven: {promotion_condition} Done.") return data_array