@@ -80,13 +80,13 @@ def __init__(self, isTorrentOrMagnet, isDotTorrentFile) -> None:
8080 self .isTorrentOrMagnet = isTorrentOrMagnet
8181 self .isDotTorrentFile = isDotTorrentFile
8282
83- def __init__ (self , filename , isRadarr ) -> None :
83+ def __init__ (self , filename , isRadarr , filePath = None ) -> None :
8484 print ('filename:' , filename )
8585 baseBath = getPath (isRadarr )
8686 isDotTorrentFile = filename .casefold ().endswith ('.torrent' )
8787 isTorrentOrMagnet = isDotTorrentFile or filename .casefold ().endswith ('.magnet' )
8888 filenameWithoutExt , _ = os .path .splitext (filename )
89- filePath = os .path .join (baseBath , filename )
89+ filePath = filePath or os .path .join (baseBath , filename )
9090 filePathProcessing = os .path .join (baseBath , 'processing' , filename )
9191 folderPathCompleted = os .path .join (baseBath , 'completed' , filenameWithoutExt )
9292 folderPathMountTorrent = os .path .join (blackhole ['rdMountTorrentsPath' ], filenameWithoutExt )
@@ -96,11 +96,10 @@ def __init__(self, filename, isRadarr) -> None:
9696
9797
9898class TorrentBase (ABC ):
99- def __init__ (self , f , file , fail , failIfNotCached , onlyLargestFile ) -> None :
99+ def __init__ (self , f , file , failIfNotCached , onlyLargestFile ) -> None :
100100 super ().__init__ ()
101101 self .f = f
102102 self .file = file
103- self .fail = fail
104103 self .failIfNotCached = failIfNotCached
105104 self .onlyLargestFile = onlyLargestFile
106105 self .id = None
@@ -125,7 +124,6 @@ def submitTorrent(self):
125124 instantAvailability = self .getInstantAvailability ()
126125 self .print ('instantAvailability:' , not not instantAvailability )
127126 if not instantAvailability :
128- self .fail (self )
129127 return False
130128
131129 availableHost = self .getAvailableHost ()
@@ -261,7 +259,7 @@ def getPath(isRadarr, create=False):
261259 finalPath = os .path .join (absoluteBaseWatchPath , blackhole ['radarrPath' ] if isRadarr else blackhole ['sonarrPath' ])
262260
263261 if create :
264- for sub_path in ['' , 'processing' , 'completed' ]:
262+ for sub_path in ['' , 'processing' , 'completed' , 'uncached' ]:
265263 path_to_check = os .path .join (finalPath , sub_path )
266264 if not os .path .exists (path_to_check ):
267265 os .makedirs (path_to_check )
@@ -319,7 +317,7 @@ def print(*values: object):
319317
320318import signal
321319
322- async def processFile (file : TorrentFileInfo , arr : Arr , isRadarr ):
320+ async def processFile (file : TorrentFileInfo , arr : Arr , isRadarr , failIfNotCached = None ):
323321 try :
324322 _print = globals ()['print' ]
325323
@@ -346,7 +344,7 @@ async def is_accessible(path, timeout=10):
346344 executor .shutdown (wait = False )
347345
348346 with open (file .fileInfo .filePathProcessing , 'rb' if file .torrentInfo .isDotTorrentFile else 'r' ) as f :
349- def fail (torrent : TorrentBase , arr : Arr = arr ):
347+ def fail (torrent : TorrentBase , arr : Arr = arr , uncached = False ):
350348 print (f"Failing" )
351349
352350 history = arr .getHistory (blackhole ['historyPageSize' ])['records' ]
@@ -358,15 +356,25 @@ def fail(torrent: TorrentBase, arr: Arr=arr):
358356 for item in items :
359357 # TODO: See if we can fail without blacklisting as cached items constantly changes
360358 arr .failHistoryItem (item ['id' ])
359+
360+ if uncached and items :
361+ ids = '-' .join (str (item .get ('episodeId' , item ['movieId' ])) for item in items )
362+ path = os .path .join (getPath (isRadarr ), 'uncached' , ids )
363+ os .renames (torrent .file .fileInfo .filePathProcessing , path )
364+
361365 print (f"Failed" )
366+
362367
368+ failIfNotCached = blackhole ['failIfNotCached' ] if failIfNotCached is None else failIfNotCached ;
363369 onlyLargestFile = isRadarr or bool (re .search (r'S[\d]{2}E[\d]{2}' , file .fileInfo .filename ))
364370 if file .torrentInfo .isDotTorrentFile :
365- torrent = Torrent (f , file , fail , blackhole [ ' failIfNotCached' ] , onlyLargestFile )
371+ torrent = Torrent (f , file , failIfNotCached , onlyLargestFile )
366372 else :
367- torrent = Magnet (f , file , fail , blackhole [ ' failIfNotCached' ] , onlyLargestFile )
373+ torrent = Magnet (f , file , failIfNotCached , onlyLargestFile )
368374
369- if torrent .submitTorrent ():
375+ if not torrent .submitTorrent ():
376+ historyItems = fail (torrent , uncached = True )
377+ else :
370378 count = 0
371379 while True :
372380 count += 1
@@ -387,7 +395,7 @@ def fail(torrent: TorrentBase, arr: Arr=arr):
387395 if torrent .incompatibleHashSize and torrent .failIfNotCached :
388396 print ("Non-cached incompatible hash sized torrent" )
389397 torrent .delete ()
390- fail (torrent )
398+ fail (torrent , uncached = True )
391399 break
392400 await asyncio .sleep (1 )
393401 elif status == 'magnet_error' or status == 'error' or status == 'dead' or status == 'virus' :
@@ -500,7 +508,7 @@ def fail(torrent: TorrentBase, arr: Arr=arr):
500508
def getFiles(isRadarr):
    """Collect torrent/magnet files waiting in the Radarr/Sonarr watch folder.

    Skips the bookkeeping subdirectories ('processing', 'completed',
    'uncached') and keeps only entries recognised as .torrent/.magnet files.
    """
    print('getFiles')
    skipEntries = {'processing', 'completed', 'uncached'}
    candidates = [
        TorrentFileInfo(name, isRadarr)
        for name in os.listdir(getPath(isRadarr))
        if name not in skipEntries
    ]
    return [info for info in candidates if info.torrentInfo.isTorrentOrMagnet]
505513
506514async def on_created (isRadarr ):
@@ -541,5 +549,66 @@ async def on_created(isRadarr):
def start(isRadarr):
    """Entry point: run the blackhole watcher loop for one arr.

    isRadarr selects which watch folder is monitored (True = Radarr,
    False = Sonarr); the async work is driven by on_created().
    """
    asyncio.run(on_created(isRadarr))
543551
def removeDir(dirPath):
    """Delete every file directly inside dirPath, then the directory itself.

    Assumes a flat directory (no subdirectories), matching how the
    'uncached' marker directories are created.
    """
    for entry in os.listdir(dirPath):
        os.remove(os.path.join(dirPath, entry))
    os.rmdir(dirPath)
557+
async def processUncachedDir(root, dir, arr, isRadarr):
    """Retry a single 'uncached' marker directory.

    The directory name is '-'-joined movie/episode ids; it holds torrent
    files that previously failed because they were not cached. The oldest
    file is resubmitted (without the cached-only restriction) once the
    directory has been quiet for 15 minutes, unless the arr already has
    files for every id, in which case the directory is discarded.

    Note: the `dir` parameter name shadows the builtin but is kept for
    interface compatibility with existing callers.
    """
    # Compute the path before the try so the error handler below can
    # always reference it (previously a failure before assignment would
    # have raised NameError inside the except block).
    dirPath = os.path.join(root, dir)
    try:
        files = os.listdir(dirPath)
        if not files:
            # Nothing left to retry; drop the empty marker directory.
            os.rmdir(dirPath)
            return

        ids = dir.split('-')
        # If every referenced item already has a file, retrying is moot.
        if all(arr.getGrandchild(grandchildId).hasFile for grandchildId in ids):
            removeDir(dirPath)
            return

        files = sorted((os.path.join(dirPath, name) for name in files), key=os.path.getctime)

        # Wait until no new torrent has arrived for 15 minutes so we act
        # on a settled set of candidates.
        newestFileTime = os.path.getctime(files[-1])
        if (time.time() - newestFileTime) <= 900:  # 15 minutes
            return

        # Retry only the oldest candidate, allowing uncached downloads.
        oldestFile = files[0]
        await processFile(TorrentFileInfo(os.path.basename(oldestFile), isRadarr, oldestFile), arr, isRadarr, failIfNotCached=False)
        removeDir(dirPath)
    except Exception:
        # Catch Exception rather than a bare except so task cancellation
        # (asyncio.CancelledError) and interpreter exits still propagate.
        # Errors are reported but do not abort the rest of the batch.
        e = traceback.format_exc()

        print(f"Error processing uncached directory: {dirPath}")
        print(e)

        discordError(f"Error processing uncached directory: {dirPath}", e)
587+
async def processUncached():
    """Scan both arrs' 'uncached' folders and retry stalled torrents.

    Walks each folder, fanning out one processUncachedDir() task per
    marker directory, and waits for all of them. Errors are reported to
    Discord; the trailing status line is printed regardless of outcome.
    """
    print('Processing uncached')
    try:
        radarr = Radarr()
        sonarr = Sonarr()

        paths = [
            (os.path.join(getPath(isRadarr=True), 'uncached'), radarr, True),
            (os.path.join(getPath(isRadarr=False), 'uncached'), sonarr, False),
        ]

        futures: list[asyncio.Future] = []

        for path, arr, isRadarr in paths:
            for root, dirs, _ in os.walk(path):
                futures.append(asyncio.gather(*(processUncachedDir(root, dir, arr, isRadarr) for dir in dirs)))

        await asyncio.gather(*futures)
    except Exception:
        # Exception (not a bare except) so asyncio.CancelledError and
        # SystemExit are not swallowed here.
        e = traceback.format_exc()

        print("Error processing uncached")
        print(e)

        discordError("Error processing uncached", e)
    print("Finished processing uncached")
611+
612+
if __name__ == "__main__":
    # Usage: python <script> <radarr|sonarr>
    # Any argv[1] other than 'radarr' selects the Sonarr watch folder;
    # a missing argument raises IndexError (no usage message is printed).
    start(isRadarr=sys.argv[1] == 'radarr')
0 commit comments