Skip to content

Commit

Permalink
del: 각 크롤러별 logging.basicConfig 삭제
Browse files Browse the repository at this point in the history
  • Loading branch information
WhiteHyun committed Apr 19, 2024
1 parent f523676 commit 8c939a5
Show file tree
Hide file tree
Showing 5 changed files with 10 additions and 6 deletions.
2 changes: 1 addition & 1 deletion Crawler/base/crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ class Crawler(ABC):
_is_valid_image(session, image_url): 주어진 이미지 URL이 유효한 이미지인지 확인하는 비동기 유틸리티 메서드입니다.
"""

-logging.basicConfig(level=logging.INFO)
+logging.basicConfig(level=logging.DEBUG)
__logger = logging.getLogger(__name__)

@property
Expand Down
3 changes: 2 additions & 1 deletion Crawler/cu_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@


class CUCrawler(Crawler):
-logging.basicConfig(level=logging.INFO)
__logger = logging.getLogger(__name__)
_base_url = "https://cu.bgfretail.com/event/plusAjax.do"
__search_conditions = [23, 24] # 1+1, 2+1
Expand Down Expand Up @@ -75,7 +74,9 @@ async def execute(self) -> list[EventItem]:
break
data_array.extend(event_items)

+self.__logger.debug(f"PageNumber Increasing... {page_num}")
 page_num += 1
+self.__logger.debug(f"CU: {search_condition} Done.")

return data_array

Expand Down
3 changes: 2 additions & 1 deletion Crawler/emart24_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@


class Emart24Crawler(Crawler):
-logging.basicConfig(level=logging.INFO)
__logger = logging.getLogger(__name__)
_base_url = "https://emart24.co.kr/goods/event"
__category_seqs = [1, 2] # 1+1, 2+1 각각 가져오기
Expand Down Expand Up @@ -74,7 +73,9 @@ async def execute(self) -> list[EventItem]:
break
data_array.extend(event_items)

+self.__logger.debug(f"PageNumber Increasing... {page_num}")
 page_num += 1
+self.__logger.debug(f"emart24: {category_seq} Done.")

return data_array

Expand Down
5 changes: 3 additions & 2 deletions Crawler/gs_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@


class GSCrawler(Crawler):
-logging.basicConfig(level=logging.INFO)
__logger = logging.getLogger(__name__)
_base_url = "http://gs25.gsretail.com/gscvs/ko/products/event-goods-search"
__parameter_lists = ["ONE_TO_ONE", "TWO_TO_ONE"]
Expand Down Expand Up @@ -67,15 +66,17 @@ async def execute(self):
total_pages = json_data["pagination"]["numberOfPages"]
if page_num >= total_pages:
break

+self.__logger.debug(f"PageNumber Increasing... {page_num}")
 page_num += 1
+self.__logger.debug(f"GS: {parameter_list} Done.")

return data_array


async def main():
crawler = GSCrawler()
items = await crawler.execute()
print(len(items))


if __name__ == "__main__":
Expand Down
3 changes: 2 additions & 1 deletion Crawler/seven_eleven_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@


class SevenElevenCrawler(Crawler):
-logging.basicConfig(level=logging.INFO)
__logger = logging.getLogger(__name__)
_base_url = "https://www.7-eleven.co.kr/product/listMoreAjax.asp"
__promotion_conditions = [1, 2] # 1+1, 2+1
Expand Down Expand Up @@ -87,7 +86,9 @@ async def execute(self) -> list[EventItem]:
break
data_array.extend(event_items)

+self.__logger.debug(f"PageNumber Increasing... {page_num}")
 page_num += 1
+self.__logger.debug(f"7-Eleven: {promotion_condition} Done.")

return data_array

Expand Down

0 comments on commit 8c939a5

Please sign in to comment.