Skip to content

Commit 176adbf

Browse files
authored
Smart caching if no uncached found
1 parent 0eaa24b commit 176adbf

3 files changed

Lines changed: 125 additions & 17 deletions

File tree

blackhole.py

Lines changed: 82 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -80,13 +80,13 @@ def __init__(self, isTorrentOrMagnet, isDotTorrentFile) -> None:
8080
self.isTorrentOrMagnet = isTorrentOrMagnet
8181
self.isDotTorrentFile = isDotTorrentFile
8282

83-
def __init__(self, filename, isRadarr) -> None:
83+
def __init__(self, filename, isRadarr, filePath=None) -> None:
8484
print('filename:', filename)
8585
baseBath = getPath(isRadarr)
8686
isDotTorrentFile = filename.casefold().endswith('.torrent')
8787
isTorrentOrMagnet = isDotTorrentFile or filename.casefold().endswith('.magnet')
8888
filenameWithoutExt, _ = os.path.splitext(filename)
89-
filePath = os.path.join(baseBath, filename)
89+
filePath = filePath or os.path.join(baseBath, filename)
9090
filePathProcessing = os.path.join(baseBath, 'processing', filename)
9191
folderPathCompleted = os.path.join(baseBath, 'completed', filenameWithoutExt)
9292
folderPathMountTorrent = os.path.join(blackhole['rdMountTorrentsPath'], filenameWithoutExt)
@@ -96,11 +96,10 @@ def __init__(self, filename, isRadarr) -> None:
9696

9797

9898
class TorrentBase(ABC):
99-
def __init__(self, f, file, fail, failIfNotCached, onlyLargestFile) -> None:
99+
def __init__(self, f, file, failIfNotCached, onlyLargestFile) -> None:
100100
super().__init__()
101101
self.f = f
102102
self.file = file
103-
self.fail = fail
104103
self.failIfNotCached = failIfNotCached
105104
self.onlyLargestFile = onlyLargestFile
106105
self.id = None
@@ -125,7 +124,6 @@ def submitTorrent(self):
125124
instantAvailability = self.getInstantAvailability()
126125
self.print('instantAvailability:', not not instantAvailability)
127126
if not instantAvailability:
128-
self.fail(self)
129127
return False
130128

131129
availableHost = self.getAvailableHost()
@@ -261,7 +259,7 @@ def getPath(isRadarr, create=False):
261259
finalPath = os.path.join(absoluteBaseWatchPath, blackhole['radarrPath'] if isRadarr else blackhole['sonarrPath'])
262260

263261
if create:
264-
for sub_path in ['', 'processing', 'completed']:
262+
for sub_path in ['', 'processing', 'completed', 'uncached']:
265263
path_to_check = os.path.join(finalPath, sub_path)
266264
if not os.path.exists(path_to_check):
267265
os.makedirs(path_to_check)
@@ -319,7 +317,7 @@ def print(*values: object):
319317

320318
import signal
321319

322-
async def processFile(file: TorrentFileInfo, arr: Arr, isRadarr):
320+
async def processFile(file: TorrentFileInfo, arr: Arr, isRadarr, failIfNotCached=None):
323321
try:
324322
_print = globals()['print']
325323

@@ -346,7 +344,7 @@ async def is_accessible(path, timeout=10):
346344
executor.shutdown(wait=False)
347345

348346
with open(file.fileInfo.filePathProcessing, 'rb' if file.torrentInfo.isDotTorrentFile else 'r') as f:
349-
def fail(torrent: TorrentBase, arr: Arr=arr):
347+
def fail(torrent: TorrentBase, arr: Arr=arr, uncached=False):
350348
print(f"Failing")
351349

352350
history = arr.getHistory(blackhole['historyPageSize'])['records']
@@ -358,15 +356,25 @@ def fail(torrent: TorrentBase, arr: Arr=arr):
358356
for item in items:
359357
# TODO: See if we can fail without blacklisting as cached items constantly changes
360358
arr.failHistoryItem(item['id'])
359+
360+
if uncached and items:
361+
ids = '-'.join(str(item.get('episodeId', item['movieId'])) for item in items)
362+
path = os.path.join(getPath(isRadarr), 'uncached', ids)
363+
os.renames(torrent.file.fileInfo.filePathProcessing, path)
364+
361365
print(f"Failed")
366+
362367

368+
failIfNotCached = blackhole['failIfNotCached'] if failIfNotCached is None else failIfNotCached;
363369
onlyLargestFile = isRadarr or bool(re.search(r'S[\d]{2}E[\d]{2}', file.fileInfo.filename))
364370
if file.torrentInfo.isDotTorrentFile:
365-
torrent = Torrent(f, file, fail, blackhole['failIfNotCached'], onlyLargestFile)
371+
torrent = Torrent(f, file, failIfNotCached, onlyLargestFile)
366372
else:
367-
torrent = Magnet(f, file, fail, blackhole['failIfNotCached'], onlyLargestFile)
373+
torrent = Magnet(f, file, failIfNotCached, onlyLargestFile)
368374

369-
if torrent.submitTorrent():
375+
if not torrent.submitTorrent():
376+
historyItems = fail(torrent, uncached=True)
377+
else:
370378
count = 0
371379
while True:
372380
count += 1
@@ -387,7 +395,7 @@ def fail(torrent: TorrentBase, arr: Arr=arr):
387395
if torrent.incompatibleHashSize and torrent.failIfNotCached:
388396
print("Non-cached incompatible hash sized torrent")
389397
torrent.delete()
390-
fail(torrent)
398+
fail(torrent, uncached=True)
391399
break
392400
await asyncio.sleep(1)
393401
elif status == 'magnet_error' or status == 'error' or status == 'dead' or status == 'virus':
@@ -500,7 +508,7 @@ def fail(torrent: TorrentBase, arr: Arr=arr):
500508

501509
def getFiles(isRadarr):
502510
print('getFiles')
503-
files = (TorrentFileInfo(filename, isRadarr) for filename in os.listdir(getPath(isRadarr)) if filename not in ['processing', 'completed'])
511+
files = (TorrentFileInfo(filename, isRadarr) for filename in os.listdir(getPath(isRadarr)) if filename not in ['processing', 'completed', 'uncached'])
504512
return [file for file in files if file.torrentInfo.isTorrentOrMagnet]
505513

506514
async def on_created(isRadarr):
@@ -541,5 +549,66 @@ async def on_created(isRadarr):
541549
def start(isRadarr):
542550
asyncio.run(on_created(isRadarr))
543551

552+
def removeDir(dirPath):
    """Delete every file directly inside dirPath, then dirPath itself.

    Assumes the directory is flat (contains only files); a nested
    directory would make os.remove raise.
    """
    for entry in os.listdir(dirPath):
        os.remove(os.path.join(dirPath, entry))
    os.rmdir(dirPath)
557+
558+
async def processUncachedDir(root, dir, arr, isRadarr):
    """Process one 'uncached' retry directory of torrent files.

    The directory name encodes the ids it was saved for, joined with '-'
    (episode ids for Sonarr, movie ids for Radarr). If every id already
    has a file, the directory is discarded wholesale. Otherwise, once no
    new file has arrived for 15 minutes, the oldest torrent file is
    re-submitted via processFile with failIfNotCached=False and the
    directory is cleaned up.

    Errors are reported to the console and Discord but never propagate.
    """
    # Compute dirPath before the try so the except handler can always
    # reference it (previously it could be unbound if listdir raised first).
    dirPath = os.path.join(root, dir)
    try:
        files = os.listdir(dirPath)
        if not files:
            # Nothing left to retry; remove the empty directory.
            os.rmdir(dirPath)
            return

        ids = dir.split('-')
        if all(arr.getGrandchild(id).hasFile for id in ids):
            # Every item was obtained some other way; retries are moot.
            removeDir(dirPath)
            return

        files = sorted((os.path.join(dirPath, file) for file in files), key=os.path.getctime)

        newestFileTime = os.path.getctime(files[-1])
        if (time.time() - newestFileTime) <= 900: # 15 minutes
            # Candidates are still arriving; wait for the dir to settle.
            return

        oldestFile = files[0]
        await processFile(TorrentFileInfo(os.path.basename(oldestFile), isRadarr, oldestFile), arr, isRadarr, failIfNotCached=False)
        removeDir(dirPath)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed here.
        e = traceback.format_exc()

        print(f"Error processing uncached directory: {dirPath}")
        print(e)

        discordError(f"Error processing uncached directory: {dirPath}", e)
587+
588+
async def processUncached():
    """Scan both arrs' 'uncached' watch directories and retry stale entries.

    Builds (path, arr, isRadarr) pairs for Radarr and Sonarr, walks each
    'uncached' directory, and processes every subdirectory concurrently
    via processUncachedDir. Errors are reported to the console and
    Discord; this coroutine never raises.
    """
    print('Processing uncached')
    try:
        radarr = Radarr()
        sonarr = Sonarr()

        paths = [(os.path.join(getPath(isRadarr=True), 'uncached'), radarr, True), (os.path.join(getPath(isRadarr=False), 'uncached'), sonarr, False)]

        futures: list[asyncio.Future] = []

        for path, arr, isRadarr in paths:
            # NOTE(review): os.walk recurses into subdirectories; presumably
            # only the top level holds id-named dirs — confirm nothing ever
            # nests deeper, or dirs would be processed at the wrong root.
            for root, dirs, _ in os.walk(path):
                futures.append(asyncio.gather(*(processUncachedDir(root, dir, arr, isRadarr) for dir in dirs)))

        await asyncio.gather(*futures)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed here.
        e = traceback.format_exc()

        print(f"Error processing uncached")
        print(e)

        discordError(f"Error processing uncached", e)
    print("Finished processing uncached")
611+
612+
544613
if __name__ == "__main__":
545614
start(isRadarr=sys.argv[1] == 'radarr')

blackhole_watcher.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1+
import asyncio
12
from watchdog.observers import Observer
23
from watchdog.events import FileSystemEventHandler
3-
from blackhole import start, getPath
4+
from blackhole import start, processUncached, getPath
45

56
class BlackholeHandler(FileSystemEventHandler):
67
def __init__(self, is_radarr):
@@ -18,6 +19,12 @@ def on_created(self, event):
1819
self.is_processing = False
1920

2021

22+
async def scheduleProcessUncached():
    """Run processUncached forever on a fixed 10-minute cadence.

    Sleeps first, so the initial pass happens one interval after startup.
    """
    interval_seconds = 600  # 10 minutes
    while True:
        await asyncio.sleep(interval_seconds)
        await processUncached()
26+
27+
2128
if __name__ == "__main__":
2229
print("Watching blackhole")
2330

@@ -33,6 +40,7 @@ def on_created(self, event):
3340
try:
3441
radarr_observer.start()
3542
sonarr_observer.start()
43+
asyncio.run(scheduleProcessUncached())
3644
except KeyboardInterrupt:
3745
radarr_observer.stop()
3846
sonarr_observer.stop()

shared/arr.py

Lines changed: 34 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ def validateRadarrApiKey():
4040
return False
4141

4242
return True
43+
4344
requiredEnvs = {
4445
'Sonarr host': (sonarr['host'], validateSonarrHost),
4546
'Sonarr API key': (sonarr['apiKey'], validateSonarrApiKey, True),
@@ -67,6 +68,10 @@ def id(self):
6768
def title(self):
6869
return self.json['title']
6970

71+
@property
def hasFile(self):
    """True when the record's JSON marks a file as present on disk."""
    # Defaults to False when the 'hasFile' key is absent from the payload.
    return self.json.get('hasFile', False)
74+
7075
@property
7176
def path(self):
7277
return self.json['path']
@@ -132,6 +137,22 @@ def setChildMonitored(self, childId: int, monitored: bool):
132137
season['monitored'] = monitored
133138
break
134139

140+
class Episode(Media):
    """A single Sonarr episode exposed through the shared Media interface.

    An episode has no children of its own, so the child-oriented
    properties treat the episode itself as its only (potential) child.
    """

    @property
    def size(self):
        # Sonarr reports per-episode size under 'sizeOnDisk'.
        return self.json['sizeOnDisk']

    @property
    def monitoredChildrenIds(self):
        # The episode is its own sole child when monitored.
        if self.json['monitored']:
            return [self.id]
        return []

    @property
    def fullyAvailableChildrenIds(self):
        # Available iff the episode itself has a file.
        if self.json['hasFile']:
            return [self.id]
        return []

    def setChildMonitored(self, childId: int, monitored: bool):
        # childId is ignored: an episode only ever toggles itself.
        self.json["monitored"] = monitored
155+
135156
class MediaFile(ABC):
136157
def __init__(self, json) -> None:
137158
super().__init__()
@@ -169,19 +190,25 @@ def parentId(self):
169190
return self.json['movieId']
170191

171192
class Arr(ABC):
172-
def __init__(self, host: str, apiKey: str, endpoint: str, fileEndpoint: str, childIdName: str, childName: str, constructor: Type[Media], fileConstructor: Type[MediaFile]) -> None:
193+
def __init__(self, host: str, apiKey: str, endpoint: str, fileEndpoint: str, childIdName: str, childName: str, grandchildEndpoint: str, constructor: Type[Media], grandchildConstructor:Type[Media], fileConstructor: Type[MediaFile]) -> None:
173194
self.host = host
174195
self.apiKey = apiKey
175196
self.endpoint = endpoint
176197
self.fileEndpoint = fileEndpoint
177198
self.childIdName = childIdName
178199
self.childName = childName
200+
self.grandchildEndpoint = grandchildEndpoint
179201
self.constructor = constructor
202+
self.grandchildConstructor = grandchildConstructor
180203
self.fileConstructor = fileConstructor
181204

182205
def get(self, id: int):
183206
get = requests.get(f"{self.host}/api/v3/{self.endpoint}/{id}?apiKey={self.apiKey}")
184207
return self.constructor(get.json())
208+
209+
def getGrandchild(self, id: int):
    """Fetch one grandchild record (episode/movie) by id from the arr API."""
    response = requests.get(f"{self.host}/api/v3/{self.grandchildEndpoint}/{id}?apiKey={self.apiKey}")
    return self.grandchildConstructor(response.json())
185212

186213
def getAll(self):
187214
get = requests.get(f"{self.host}/api/v3/{self.endpoint}?apiKey={self.apiKey}")
@@ -225,16 +252,18 @@ def automaticSearch(self, media: Media, childId: int):
225252

226253
def _automaticSearchJson(self, media: Media, childId: int):
227254
pass
255+
228256
class Sonarr(Arr):
229257
host = sonarr['host']
230258
apiKey = sonarr['apiKey']
231259
endpoint = 'series'
232260
fileEndpoint = 'episodefile'
233261
childIdName = 'seasonNumber'
234262
childName = 'Season'
263+
grandchildEndpoint = 'episode'
235264

236265
def __init__(self) -> None:
237-
super().__init__(Sonarr.host, Sonarr.apiKey, Sonarr.endpoint, Sonarr.fileEndpoint, Sonarr.childIdName, Sonarr.childName, Show, EpisodeFile)
266+
super().__init__(Sonarr.host, Sonarr.apiKey, Sonarr.endpoint, Sonarr.fileEndpoint, Sonarr.childIdName, Sonarr.childName, Sonarr.grandchildEndpoint, Show, Episode, EpisodeFile)
238267

239268
def _automaticSearchJson(self, media: Media, childId: int):
240269
return {"name": f"{self.childName}Search", f"{self.endpoint}Id": media.id, self.childIdName: childId}
@@ -246,9 +275,11 @@ class Radarr(Arr):
246275
fileEndpoint = 'moviefile'
247276
childIdName = None
248277
childName = 'Movies'
278+
grandchildEndpoint = endpoint
249279

250280
def __init__(self) -> None:
251-
super().__init__(Radarr.host, Radarr.apiKey, Radarr.endpoint, Radarr.fileEndpoint, None, Radarr.childName, Movie, MovieFile)
281+
super().__init__(Radarr.host, Radarr.apiKey, Radarr.endpoint, Radarr.fileEndpoint, Radarr.childIdName, Radarr.childName, Radarr.grandchildEndpoint, Movie, Movie, MovieFile)
252282

253283
def _automaticSearchJson(self, media: Media, childId: int):
254284
return {"name": f"{self.childName}Search", f"{self.endpoint}Ids": [media.id]}
285+

0 commit comments

Comments
 (0)