Commit
feat: choose max size file if no file found
g0ldyy committed Mar 3, 2025
1 parent a320f40 commit 3ed5057
Showing 2 changed files with 24 additions and 6 deletions.
comet/debrid/stremthru.py (10 changes: 9 additions & 1 deletion)
@@ -184,8 +184,9 @@ async def generate_download_link(
         name_parsed = parse(name)
         target_file = None
 
+        debrid_files = magnet["data"]["files"]
         files = []
-        for file in magnet["data"]["files"]:
+        for file in debrid_files:
             filename = file["name"]
             filename_parsed = parse(filename)
 
@@ -223,6 +224,13 @@ async def generate_download_link(
         if len(files) > 0:
             asyncio.create_task(cache_availability(self.real_debrid_name, files))
 
+        if not target_file and len(debrid_files) > 0:
+            files_with_link = [
+                file for file in debrid_files if "link" in file and file["link"]
+            ]
+            if len(files_with_link) > 0:
+                target_file = max(files_with_link, key=lambda x: x["size"])
+
         if not target_file:
             return
 
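This hunk is the change described by the commit message: when filename parsing never selects a target file, the code now falls back to the largest debrid file that actually exposes a download link instead of returning nothing. Below is a minimal standalone sketch of that fallback, assuming each file is a dict with "name", "size", and optionally "link" keys as shown in the diff; the helper name and the example data are illustrative, not part of the repository.

# Hypothetical standalone helper mirroring the fallback added in the hunk above.
def pick_fallback_file(debrid_files, target_file=None):
    # Keep an already-matched file if parsing found one.
    if not target_file and len(debrid_files) > 0:
        # Only consider files that actually expose a download link.
        files_with_link = [
            file for file in debrid_files if "link" in file and file["link"]
        ]
        if len(files_with_link) > 0:
            # Take the largest file, which is typically the main video.
            target_file = max(files_with_link, key=lambda x: x["size"])
    return target_file


# Example: the large video wins over the small sample; the .nfo has no link and is skipped.
files = [
    {"name": "sample.mkv", "size": 50_000_000, "link": "https://host/sample"},
    {"name": "movie.mkv", "size": 4_000_000_000, "link": "https://host/movie"},
    {"name": "info.nfo", "size": 1_000, "link": None},
]
assert pick_fallback_file(files)["name"] == "movie.mkv"

Filtering on "link" first avoids selecting a file the debrid service cannot serve, and taking the maximum by size favors the main video over samples and extras.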
comet/utils/torrent.py (20 changes: 15 additions & 5 deletions)
@@ -287,11 +287,13 @@ async def _process_queue(self):
                 await self._flush_batch()
 
         self.is_running = False
 
     def _reset_batches(self):
         for key in self.batches:
             if len(self.batches[key]) > 0:
-                logger.warning(f"Ignoring {len(self.batches[key])} items in problematic '{key}' batch")
+                logger.warning(
+                    f"Ignoring {len(self.batches[key])} items in problematic '{key}' batch"
+                )
             self.batches[key] = []
 
     async def _flush_batch(self):
@@ -327,14 +329,22 @@ async def _flush_batch(self):
                    existing_set = {
                        (
                            row["info_hash"],
-                            row["season"] if row["season"] is not None else None,
-                            row["episode"] if row["episode"] is not None else None,
+                            row["season"]
+                            if row["season"] is not None
+                            else None,
+                            row["episode"]
+                            if row["episode"] is not None
+                            else None,
                        )
                        for row in existing_rows
                    }
 
                    for item in sub_batch:
-                        key = (item["info_hash"], item["season"], item["episode"])
+                        key = (
+                            item["info_hash"],
+                            item["season"],
+                            item["episode"],
+                        )
                        if key in existing_set:
                            self.batches["updates"].append(item["params"])
                        else:
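Both torrent.py hunks shown here only re-wrap existing lines (note that row["season"] if row["season"] is not None else None is equivalent to row["season"]); the deduplication logic itself is unchanged. It keys every row already in the database by (info_hash, season, episode) and routes each batched item to the update batch when its key is already present, otherwise to the insert path that is collapsed out of this view. A rough sketch of that routing follows; the function name and the "inserts" batch key are assumptions.

# Illustrative sketch of the routing the hunk above re-wraps; names here are assumptions.
def route_sub_batch(existing_rows, sub_batch, batches):
    # Keys of the torrent rows already stored in the database.
    existing_set = {
        (row["info_hash"], row["season"], row["episode"]) for row in existing_rows
    }
    for item in sub_batch:
        key = (item["info_hash"], item["season"], item["episode"])
        if key in existing_set:
            # Row already exists: queue its parameters for an UPDATE.
            batches["updates"].append(item["params"])
        else:
            # Not stored yet: assumed to be queued for insertion ("inserts" is a guess,
            # since the real branch is collapsed out of the diff view).
            batches["inserts"].append(item["params"])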
