Commit

Merge pull request #503 from Lynxiayel/main
Evil0ctal authored Nov 18, 2024
2 parents b659af8 + d641f03 commit a7f87df
Showing 2 changed files with 6 additions and 4 deletions.
3 changes: 2 additions & 1 deletion crawlers/tiktok/app/app_crawler.py
@@ -74,7 +74,8 @@ async def get_tiktok_headers(self):
                 "Cookie": tiktok_config["headers"]["Cookie"],
                 "x-ladon": "Hello From Evil0ctal!",
             },
-            "proxies": {"http://": None, "https://": None},
+            "proxies": {"http://": tiktok_config["proxies"]["http"],
+                        "https://": tiktok_config["proxies"]["https"]}
         }
         return kwargs

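Both files make the same change: the proxy URLs are now read from the shared TikTok config instead of being hard-coded to None. A minimal sketch of the config shape these lookups assume (the key names mirror the diff; the actual config file in the repository may differ):

    # Hypothetical config shape -- keys follow the lookups in the diff above.
    tiktok_config = {
        "headers": {
            "Referer": "https://www.tiktok.com/",
            "Cookie": "<your cookie string>",
        },
        "proxies": {
            "http": None,   # or e.g. "http://user:pass@host:port"
            "https": None,
        },
    }

With None values in the config, the new code behaves exactly like the old hard-coded dict; filling in proxy URLs enables per-scheme proxying without touching the crawler code.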
7 changes: 4 additions & 3 deletions crawlers/tiktok/web/web_crawler.py
@@ -89,7 +89,8 @@ async def get_tiktok_headers(self):
                 "Referer": tiktok_config["headers"]["Referer"],
                 "Cookie": tiktok_config["headers"]["Cookie"],
             },
-            "proxies": {"http://": None, "https://": None},
+            "proxies": {"http://": tiktok_config["proxies"]["http"],
+                        "https://": tiktok_config["proxies"]["https"]}
         }
         return kwargs
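The "http://" / "https://" keys in the kwargs dict follow httpx's per-scheme proxy mounts, which suggests BaseCrawler hands them straight to an httpx.AsyncClient (an assumption; check crawlers/base_crawler.py in the repository). A sketch under that assumption:

    import asyncio
    import httpx

    async def demo():
        # Mount-style proxies dict, as built by get_tiktok_headers().
        # None for a scheme means "no proxy" for that scheme.
        proxies = {
            "http://": "http://127.0.0.1:7890",   # placeholder proxy URL
            "https://": "http://127.0.0.1:7890",  # placeholder proxy URL
        }
        # httpx releases prior to 0.26 accept this dict via the proxies
        # argument; newer releases use httpx.AsyncClient(mounts=...) instead.
        async with httpx.AsyncClient(proxies=proxies) as client:
            resp = await client.get("https://www.tiktok.com/")
            print(resp.status_code)

    asyncio.run(demo())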

@@ -133,7 +134,7 @@ async def fetch_user_post(self, secUid: str, cursor: int = 0, count: int = 35, c
         kwargs = await self.get_tiktok_headers()
         # proxies = {"http://": 'http://43.159.29.191:24144', "https://": 'http://43.159.29.191:24144'}
         # Create a base crawler
-        base_crawler = BaseCrawler(proxies=None, crawler_headers=kwargs["headers"])
+        base_crawler = BaseCrawler(proxies=kwargs["proxies"], crawler_headers=kwargs["headers"])
         async with base_crawler as crawler:
             # Create BaseModel params for a user's posts
             params = UserPost(secUid=secUid, cursor=cursor, count=count, coverFormat=coverFormat)
@@ -216,7 +217,7 @@ async def fetch_post_comment(self, aweme_id: str, cursor: int = 0, count: int =
         kwargs = await self.get_tiktok_headers()
         # proxies = {"http://": 'http://43.159.18.174:25263', "https://": 'http://43.159.18.174:25263'}
         # Create a base crawler
-        base_crawler = BaseCrawler(proxies=None, crawler_headers=kwargs["headers"])
+        base_crawler = BaseCrawler(proxies=kwargs["proxies"], crawler_headers=kwargs["headers"])
         async with base_crawler as crawler:
             # Create BaseModel params for a post's comments
             params = PostComment(aweme_id=aweme_id, cursor=cursor, count=count, current_region=current_region)
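After this change the fetchers pick up the configured proxies automatically. A hypothetical end-to-end call (the class name TikTokWebCrawler is inferred from the module path, not confirmed by the diff):

    import asyncio
    from crawlers.tiktok.web.web_crawler import TikTokWebCrawler  # class name assumed

    async def main():
        crawler = TikTokWebCrawler()
        # Fetch comments for one post; requests now go through
        # tiktok_config["proxies"] instead of a forced direct connection.
        comments = await crawler.fetch_post_comment(aweme_id="7123456789012345678")  # placeholder ID
        print(comments)

    asyncio.run(main())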
