
Commit

Improve the test script and update test cases
Yuukiy committed Jan 7, 2024
1 parent 78d0c0f commit b6cdd22
Showing 2 changed files with 22 additions and 19 deletions.
21 changes: 7 additions & 14 deletions unittest/data/KQBD-089 (msin).json
@@ -6,21 +6,14 @@
"cover": "https://pics.dmm.co.jp/digital/video/244kqbd00089/244kqbd00089pl.jpg",
"big_cover": null,
"genre": [
"4K",
"ハイビジョン",
"単体作品",
"フェラ",
"ごっくん",
"お掃除フェラ",
"美少女",
"中出し",
"中出し",
"バック",
"",
"",
"制服",
"中出し",
"ベロ",
"キス",
"",
"舐め"
"電マ",
"女子校生",
"拘束",
"セーラー服"
],
"genre_id": null,
"genre_norm": null,
20 changes: 15 additions & 5 deletions unittest/test_crawlers.py
@@ -10,7 +10,7 @@
sys.path.insert(0, os.path.abspath(os.path.join(file_dir, '..')))

from core.datatype import MovieInfo
from web.exceptions import CrawlerError
from web.exceptions import CrawlerError, SiteBlocked


logger = logging.getLogger(__name__)
@@ -47,12 +47,15 @@ def compare(avid, scraper, file):
        parse_data = getattr(mod, 'parse_clean_data')
    else:
        parse_data = getattr(mod, 'parse_data')

    try:
        parse_data(online)
    except CrawlerError as e:
        logger.info(e)
    except requests.exceptions.ReadTimeout as e:
    except SiteBlocked as e:
        logger.warning(e)
        return
    except (CrawlerError, requests.exceptions.ReadTimeout) as e:
        logger.info(e)

    try:
        # Unpack the data before comparing, so the unequal key/value can be located quickly when a test fails
        local_vars = vars(local)
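
For reference, a minimal standalone sketch (not part of the commit) of the control flow this hunk introduces: a SiteBlocked error aborts the comparison early with a warning, while other crawler errors and read timeouts are only logged and the comparison still runs on whatever data was parsed. The stub exception classes and the parse/compare callables below are placeholders for the project's web.exceptions types and crawler functions; SiteBlocked subclassing CrawlerError is an assumption suggested by the except ordering in the diff.

import logging

import requests

logger = logging.getLogger(__name__)


class CrawlerError(Exception):
    """Stand-in for web.exceptions.CrawlerError."""


class SiteBlocked(CrawlerError):
    """Stand-in for web.exceptions.SiteBlocked (assumed to subclass CrawlerError)."""


def compare_sketch(parse_data, online, run_comparison):
    try:
        parse_data(online)
    except SiteBlocked as e:
        # The site refused the request (anti-bot page, regional block, ...):
        # the online data is unusable, so warn and skip this comparison.
        logger.warning(e)
        return
    except (CrawlerError, requests.exceptions.ReadTimeout) as e:
        # Other crawler errors or timeouts: log them and still compare
        # whatever was parsed before the failure.
        logger.info(e)
    run_comparison(online)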
@@ -68,12 +71,19 @@ def compare(avid, scraper, file):
                assert urlsplit(v).path == urlsplit(local_vars.get(k, None)).path
            elif k == 'actress_pics' and scraper == 'javbus':
                local_tmp = online_tmp = {}
                local_pics = local_vars.get('actress_pics')
                local_pics = local_vars.get(k)
                if local_pics:
                    local_tmp = {name: urlsplit(url).path for name, url in local_pics.items()}
                if v:
                    online_tmp = {name: urlsplit(url).path for name, url in v.items()}
                assert local_tmp == online_tmp
            elif k == 'preview_pics' and scraper == 'javbus':
                local_pics = local_vars.get(k)
                if local_pics:
                    local_tmp = [urlsplit(i).path for i in local_pics]
                if v:
                    online_tmp = [urlsplit(i).path for i in v]
                assert local_tmp == online_tmp
            # For list-type fields where order does not matter, ignore ordering when comparing
            elif k in ['genre', 'genre_id', 'genre_norm', 'actress']:
                if isinstance(v, list):
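
The comment above announces an order-insensitive comparison for the genre and actress list fields; the implementation is not shown in this truncated hunk, but the stated intent can be illustrated by comparing sorted copies, roughly as in this hypothetical helper:

def lists_equal_ignoring_order(local_list, online_list):
    # Order carries no meaning for these fields, so compare sorted copies.
    return sorted(local_list or []) == sorted(online_list or [])


assert lists_equal_ignoring_order(['中出し', '単体作品'], ['単体作品', '中出し'])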

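A minimal standalone sketch (not part of the commit, with made-up sample URLs) of the path-only comparison the new preview_pics branch above performs: for javbus the host serving an image presumably varies between runs, so only the urlsplit(url).path component is compared, and a missing list on either side is treated as empty.

from urllib.parse import urlsplit


def preview_pics_equal(local_pics, online_pics):
    """Compare two lists of image URLs by path only, ignoring scheme and host."""
    local_paths = [urlsplit(u).path for u in (local_pics or [])]
    online_paths = [urlsplit(u).path for u in (online_pics or [])]
    return local_paths == online_paths


# The same image served from two different hosts still compares equal
# (both URLs below are hypothetical).
local = ['https://www.javbus.com/pics/sample/abc_1.jpg']
online = ['https://static.example-cdn.net/pics/sample/abc_1.jpg']
assert preview_pics_equal(local, online)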