Skip to content

Commit

Permalink
Merge branch 'python3-dev'
Browse files Browse the repository at this point in the history
IMP:Attempt to parse issue ID from webpage, if notes field doesn't exist (newer ComicTagger versions) (@bbtufty)
IMP: Added kubernetes detection (@lavahot)
IMP: queue_schedule: introduce helper functions for common start and shutdown code (@OddBloke)
FIX:(mylar3#1336)(mylar3#1232)(mylar3#873) Import would fail when importing a filename with no issue number
FIX:(mylar3#1231) Incomplete series would cause post-processing to fail if Latest Date and Publication Run were not generated
FIX:(mylar3#1361) Use shutil.move to support cross-device renames (@beardypig)
FIX:If annual gathering failed during generation, would break the add series queue
FIX:Import would fail if a series on the watchlist was being refreshed during, or if it hadn't completed for whatever reason
FIX:Ignore 0-byte files/placeholders during filescans
FIX: getcomics correct logging call (@OddBloke)
  • Loading branch information
evilhero committed Jul 26, 2023
2 parents eca80ef + 98a50f1 commit f597463
Show file tree
Hide file tree
Showing 9 changed files with 145 additions and 169 deletions.
19 changes: 13 additions & 6 deletions mylar/PostProcessor.py
Original file line number Diff line number Diff line change
Expand Up @@ -758,6 +758,9 @@ def Process(self):

#check for Paused status /
#check for Ended status and 100% completion of issues.
if wv['ComicPublished'] is None:
logger.fdebug('Publication Run cannot be generated - probably due to an incomplete Refresh. Manually refresh the following series and try again: %s (%s)' % (wv['ComicName'], wv['ComicYear']))
continue
if any([wv['Status'] == 'Paused', bool(wv['ForceContinuing']) is True]) or (wv['Have'] == wv['Total'] and not any(['Present' in wv['ComicPublished'], helpers.now()[:4] in wv['ComicPublished']])):
dbcheck = myDB.selectone('SELECT Status FROM issues WHERE ComicID=? and Int_IssueNumber=?', [wv['ComicID'], tmp_iss]).fetchone()
if not dbcheck and mylar.CONFIG.ANNUALS_ON:
Expand Down Expand Up @@ -836,13 +839,17 @@ def Process(self):
ld_check = myDB.selectone('SELECT ReleaseDate, Issue_Number, Int_IssueNumber from issues WHERE ComicID=? order by ReleaseDate DESC', [wv['ComicID']]).fetchone()
if ld_check:
#tmplatestdate = latestdate[0]
if ld_check[0][:4] != wv['LatestDate'][:4]:
if ld_check[0][:4] > wv['LatestDate'][:4]:
latestdate = ld_check[0]
try:
if ld_check[0][:4] != wv['LatestDate'][:4]:
if ld_check[0][:4] > wv['LatestDate'][:4]:
latestdate = ld_check[0]
else:
latestdate = wv['LatestDate']
else:
latestdate = wv['LatestDate']
else:
latestdate = ld_check[0]
latestdate = ld_check[0]
except Exception as e:
logger.fdebug('Unable to properly attain the Latest Date for series: %s. Cannot check against this series for post-processing.' % wv_comicname)
continue
tmplatestissue = ld_check[1]
tmplatestissueint = ld_check[2]
logger.fdebug('tmplatestissue: %s' %(tmplatestissue))
Expand Down
207 changes: 72 additions & 135 deletions mylar/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -656,22 +656,49 @@ def start():
started = True

def queue_schedule(queuetype, mode):
def start(pool_attr, target, q_arg, name, before_msg, after_msg):
    """Start a named worker thread for one of the background queues.

    pool_attr  -- attribute name on the mylar module that holds the thread
    target     -- callable the thread runs
    q_arg      -- queue object passed as the thread's single argument
    name       -- thread name, also used as the log prefix
    before_msg -- message logged before the thread is started
    after_msg  -- message logged after the thread is started
    """
    pool = getattr(mylar, pool_attr)
    try:
        # Already running - nothing to do.
        if pool.is_alive():
            return
    except Exception:
        # Pool attribute is unset or not a thread yet; fall through and start it.
        pass

    logger.info('[%s] %s', name, before_msg)
    thread = threading.Thread(target=target, args=(q_arg,), name=name)
    setattr(mylar, pool_attr, thread)
    thread.start()
    logger.info('[%s] %s', name, after_msg)

if mode == 'start':
if queuetype == 'snatched_queue':
try:
if mylar.SNPOOL.is_alive() is True:
return
except Exception as e:
pass
def shutdown(pool, mylar_queue, thread_name):
    """Ask a queue worker thread to exit and wait briefly for it to stop.

    pool        -- the worker thread to terminate
    mylar_queue -- its work queue; an 'exit' sentinel tells the worker to stop
    thread_name -- human-readable name used in log messages

    NOTE(review): relies on `mode` from the enclosing queue_schedule scope to
    decide whether a failed join should hard-exit the process - confirm this
    closure is intentional.
    """
    try:
        # Nothing to terminate if the thread isn't running.
        if not pool.is_alive():
            return
    except Exception:
        # Unset or non-thread attribute - nothing to shut down.
        return

    logger.fdebug(f'Terminating the {thread_name} thread')
    try:
        mylar_queue.put('exit')
        pool.join(5)
        logger.fdebug('Joined pool for termination - successful')
    except KeyboardInterrupt:
        # Re-signal and retry the join once on interrupt.
        mylar_queue.put('exit')
        pool.join(5)
    except AssertionError:
        # join() can raise if the thread is in a bad state; on full shutdown
        # force the process to exit rather than hang.
        if mode == 'shutdown':
            os._exit(0)

if mode == 'start':
if queuetype == 'snatched_queue':
start(
"SNPOOL",
helpers.worker_main,
SNATCHED_QUEUE,
"AUTO-SNATCHER",
'Auto-Snatch of completed torrents enabled & attempting to background load....',
'Succesfully started Auto-Snatch add-on - will now monitor for completed torrents on client....',
)
elif queuetype == 'nzb_queue':
try:
if mylar.NZBPOOL.is_alive() is True:
Expand All @@ -691,145 +718,55 @@ def queue_schedule(queuetype, mode):
logger.info('[AUTO-COMPLETE-NZB] Succesfully started Completed post-processing handling for NZBGet - will now monitor for completed nzbs within nzbget and post-process automatically...')

elif queuetype == 'search_queue':
try:
if mylar.SEARCHPOOL.is_alive() is True:
return
except Exception as e:
pass

logger.info('[SEARCH-QUEUE] Attempting to background load the search queue....')
mylar.SEARCHPOOL = threading.Thread(target=helpers.search_queue, args=(SEARCH_QUEUE,), name="SEARCH-QUEUE")
mylar.SEARCHPOOL.start()
logger.info('[SEARCH-QUEUE] Successfully started the Search Queuer...')
start(
"SEARCHPOOL",
helpers.search_queue,
SEARCH_QUEUE,
"SEARCH-QUEUE",
'Attempting to background load the search queue....',
'Successfully started the Search Queuer...',
)
elif queuetype == 'pp_queue':
try:
if mylar.PPPOOL.is_alive() is True:
return
except Exception as e:
pass

logger.info('[POST-PROCESS-QUEUE] Post Process queue enabled & monitoring for api requests....')
mylar.PPPOOL = threading.Thread(target=helpers.postprocess_main, args=(PP_QUEUE,), name="POST-PROCESS-QUEUE")
mylar.PPPOOL.start()
logger.info('[POST-PROCESS-QUEUE] Succesfully started Post-Processing Queuer....')

start(
"PPPOOL",
helpers.postprocess_main,
PP_QUEUE,
"POST-PROCESS-QUEUE",
'Post Process queue enabled & monitoring for api requests....',
'Succesfully started Post-Processing Queuer....',
)
elif queuetype == 'ddl_queue':
try:
if mylar.DDLPOOL.is_alive() is True:
return
except Exception as e:
pass

logger.info('[DDL-QUEUE] DDL Download queue enabled & monitoring for requests....')
mylar.DDLPOOL = threading.Thread(target=helpers.ddl_downloader, args=(DDL_QUEUE,), name="DDL-QUEUE")
mylar.DDLPOOL.start()
logger.info('[DDL-QUEUE:] Succesfully started DDL Download Queuer....')

start(
"DDLPOOL",
helpers.ddl_downloader,
DDL_QUEUE,
"DDL-QUEUE",
'DDL Download queue enabled & monitoring for requests....',
'Succesfully started DDL Download Queuer....',
)
else:
if (queuetype == 'nzb_queue') or mode == 'shutdown':
try:
if mylar.NZBPOOL.is_alive() is False:
return
elif all([mode!= 'shutdown', mylar.CONFIG.POST_PROCESSING is True]) and ( all([mylar.CONFIG.NZB_DOWNLOADER == 0, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]) or all([mylar.CONFIG.NZB_DOWNLOADER == 1, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]) ):
return
except Exception as e:
if all([mode!= 'shutdown', mylar.CONFIG.POST_PROCESSING is True]) and ( all([mylar.CONFIG.NZB_DOWNLOADER == 0, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]) or all([mylar.CONFIG.NZB_DOWNLOADER == 1, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]) ):
return

logger.fdebug('Terminating the NZB auto-complete queue thread')
try:
mylar.NZB_QUEUE.put('exit')
mylar.NZBPOOL.join(5)
logger.fdebug('Joined pool for termination - successful')
except KeyboardInterrupt:
mylar.NZB_QUEUE.put('exit')
mylar.NZBPOOL.join(5)
except AssertionError:
if mode == 'shutdown':
os._exit(0)

shutdown(mylar.NZBPOOL, mylar.NZB_QUEUE, "NZB auto-complete queue")

if (queuetype == 'snatched_queue') or mode == 'shutdown':
try:
if mylar.SNPOOL.is_alive() is False:
return
elif all([mode != 'shutdown', mylar.CONFIG.ENABLE_TORRENTS is True, mylar.CONFIG.AUTO_SNATCH is True, OS_DETECT != 'Windows']) and any([mylar.CONFIG.TORRENT_DOWNLOADER == 2, mylar.CONFIG.TORRENT_DOWNLOADER == 4]):
return
except Exception as e:
if all([mode != 'shutdown', mylar.CONFIG.ENABLE_TORRENTS is True, mylar.CONFIG.AUTO_SNATCH is True, OS_DETECT != 'Windows']) and any([mylar.CONFIG.TORRENT_DOWNLOADER == 2, mylar.CONFIG.TORRENT_DOWNLOADER == 4]):
return


logger.fdebug('Terminating the auto-snatch thread.')
try:
mylar.SNATCHED_QUEUE.put('exit')
mylar.SNPOOL.join(5)
logger.fdebug('Joined pool for termination - successful')
except KeyboardInterrupt:
mylar.SNATCHED_QUEUE.put('exit')
mylar.SNPOOL.join(5)
except AssertionError:
if mode == 'shutdown':
os._exit(0)
shutdown(mylar.SNPOOL, mylar.SNATCHED_QUEUE, "auto-snatch")

if (queuetype == 'search_queue') or mode == 'shutdown':
try:
if mylar.SEARCHPOOL.is_alive() is False:
return
except Exception as e:
return

logger.fdebug('Terminating the search queue thread.')
try:
mylar.SEARCH_QUEUE.put('exit')
mylar.SEARCHPOOL.join(5)
logger.fdebug('Joined pool for termination - successful')
except KeyboardInterrupt:
mylar.SEARCH_QUEUE.put('exit')
mylar.SEARCHPOOL.join(5)
except AssertionError:
if mode == 'shutdown':
os._exit(0)
shutdown(mylar.SEARCHPOOL, mylar.SEARCH_QUEUE, 'search queue')

if (queuetype == 'pp_queue') or mode == 'shutdown':
try:
if mylar.PPPOOL.is_alive() is False:
return
elif all([mylar.CONFIG.POST_PROCESSING is True, mode != 'shutdown']):
return
except Exception as e:
if all([mylar.CONFIG.POST_PROCESSING is True, mode != 'shutdown']):
return

logger.fdebug('Terminating the post-processing queue thread.')
try:
mylar.PP_QUEUE.put('exit')
mylar.PPPOOL.join(5)
logger.fdebug('Joined pool for termination - successful')
except KeyboardInterrupt:
mylar.PP_QUEUE.put('exit')
mylar.PPPOOL.join(5)
except AssertionError:
if mode == 'shutdown':
os._exit(0)
shutdown(mylar.PPPOOL, mylar.PP_QUEUE, 'post-processing queue')

if (queuetype == 'ddl_queue') or mode == 'shutdown':
try:
if mylar.DDLPOOL.is_alive() is False:
return
elif all([mylar.CONFIG.ENABLE_DDL is True, mode != 'shutdown']):
return
except Exception as e:
if all([mylar.CONFIG.ENABLE_DDL is True, mode != 'shutdown']):
return

logger.fdebug('Terminating the DDL download queue thread')
try:
mylar.DDL_QUEUE.put('exit')
mylar.DDLPOOL.join(5)
logger.fdebug('Joined pool for termination - successful')
except KeyboardInterrupt:
mylar.DDL_QUEUE.put('exit')
DDLPOOL.join(5)
except AssertionError:
if mode == 'shutdown':
os._exit(0)
shutdown(mylar.DDLPOOL, mylar.DDL_QUEUE, 'DDL download queue')


def sql_db():
Expand Down
6 changes: 3 additions & 3 deletions mylar/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -858,7 +858,7 @@ def process_kwargs(self, kwargs):
if config.has_section(section):
config.remove_option(section, ini_key)
if len(dict(config.items(section))) == 0:
config.remove_section(section)
config.remove_section(section)
except configparser.NoSectionError:
continue

Expand Down Expand Up @@ -1116,7 +1116,7 @@ def configure(self, update=False, startup=False):
logger.fdebug('[Cache Cleanup] Cache Cleanup finished. Nothing to clean!')

d_path = '/proc/self/cgroup'
if os.path.exists('/.dockerenv') or os.path.isfile(d_path) and any('docker' in line for line in open(d_path)):
if os.path.exists('/.dockerenv') or 'KUBERNETES_SERVICE_HOST' in os.environ or os.path.isfile(d_path) and any('docker' in line for line in open(d_path)):
logger.info('[DOCKER-AWARE] Docker installation detected.')
mylar.INSTALL_TYPE = 'docker'
if any([self.DESTINATION_DIR is None, self.DESTINATION_DIR == '']):
Expand Down Expand Up @@ -1669,7 +1669,7 @@ def provider_sequence(self):
"provider": found['provider'],
"orig_seq": int(seqnum)})
i-=1

#now we reorder based on priority of orig_seq, but use a new_order seq
xa = 0
NPROV = []
Expand Down
6 changes: 6 additions & 0 deletions mylar/filechecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -1597,6 +1597,7 @@ def traverse_directories(self, dir):
continue

filename = fname
comicsize = 0
if os.path.splitext(filename)[1].lower().endswith(comic_ext):
if direc is None:
try:
Expand All @@ -1605,6 +1606,11 @@ def traverse_directories(self, dir):
logger.warn('error: %s' % e)
else:
comicsize = os.path.getsize(os.path.join(dir, direc, fname))

if comicsize == 0:
# 0-byte size file encountered - ignore it as it's a placeholder most likely
continue

filelist.append({'directory': direc, #subdirectory if it exists
'filename': filename,
'comicsize': comicsize})
Expand Down
2 changes: 1 addition & 1 deletion mylar/getcomics.py
Original file line number Diff line number Diff line change
Expand Up @@ -920,7 +920,7 @@ def downloadit(self, id, link, mainlink, resume=None, issueid=None, remote_files
f.flush()

except requests.exceptions.Timeout as e:
logger.error('[ERROR] download has timed out due to inactivity...' % e)
logger.error('[ERROR] download has timed out due to inactivity...: %s', e)
mylar.DDL_LOCK = False
return {"success": False, "filename": filename, "path": None}

Expand Down
4 changes: 3 additions & 1 deletion mylar/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1660,7 +1660,9 @@ def annual_check(ComicName, SeriesYear, comicid, issuetype, issuechk, annualslis
for manchk in annload:
if manchk['ReleaseComicID'] is not None or manchk['ReleaseComicID'] is not None: #if it exists, then it's a pre-existing add
#print str(manchk['ReleaseComicID']), comic['ComicName'], str(SeriesYear), str(comicid)
annualslist += manualAnnual(manchk['ReleaseComicID'], ComicName, SeriesYear, comicid, manualupd=True, deleted=manchk['Deleted'])
tmp_the_annuals = manualAnnual(manchk['ReleaseComicID'], ComicName, SeriesYear, comicid, manualupd=True, deleted=manchk['Deleted'])
if tmp_the_annuals:
annualslist += tmp_the_annuals
annualids.append(manchk['ReleaseComicID'])

annualcomicname = re.sub('[\,\:]', '', ComicName)
Expand Down
20 changes: 19 additions & 1 deletion mylar/librarysync.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,14 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
#use the comicname_filesafe to start
watchdisplaycomic = watch['ComicName']
# let's clean up the name, just in case for comparison purposes...
watchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]', '', watch['ComicName_Filesafe'])
try:
watchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]', '', watch['ComicName_Filesafe'])
except Exception as e:
logger.warn('[IMPORT] Unable to properly retrieve series name from watchlist.'
' This is due most likely to previous problems refreshing/adding the seriess %s [error: %s]'
% (watch['ComicName_Filesafe'], e)
)
continue
#watchcomic = re.sub('\s+', ' ', str(watchcomic)).strip()

if ' the ' in watchcomic.lower():
Expand Down Expand Up @@ -373,6 +380,17 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
else:
logger.fdebug('[IMPORT-CBZ] Unable to retrieve IssueID from meta-tagging. If there is other metadata present I will use that.')

# If this doesn't work, we can fall back to try and parse from the webpage
webpage = issueinfo['metadata']['webpage']
logger.fdebug('[IMPORT-CBZ] Webpage: ' + webpage)
if webpage is not None and webpage != 'None' and 'comicvine.gamespot.com' in webpage and issuenotes_id is None:
issue_id = webpage.strip('/').split('/')[-1].split('-')[-1]
if issue_id:
issuenotes_id = issue_id
logger.fdebug('[IMPORT-CBZ] Successfully retrieved CV IssueID for ' + comicname + ' #' + issue_number + ' [' + str(issuenotes_id) + ']')
else:
logger.fdebug('[IMPORT-CBZ] Unable to retrieve IssueID from meta-tagging. If there is other metadata present I will use that.')

logger.fdebug('[IMPORT-CBZ] Adding ' + comicname + ' to the import-queue!')
#impid = comicname + '-' + str(issueyear) + '-' + str(issue_number) #com_NAME + "-" + str(result_comyear) + "-" + str(comiss)
impid = str(random.randint(1000000,99999999))
Expand Down
2 changes: 1 addition & 1 deletion mylar/versioncheck.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def getVersion(ptv):
else:

d_path = '/proc/self/cgroup'
if os.path.exists('/.dockerenv') or os.path.isfile(d_path) and any('docker' in line for line in open(d_path)):
if os.path.exists('/.dockerenv') or 'KUBERNETES_SERVICE_HOST' in os.environ or os.path.isfile(d_path) and any('docker' in line for line in open(d_path)):
logger.info('[DOCKER-AWARE] Docker installation detected.')
mylar.INSTALL_TYPE = 'docker'
if any([mylar.CONFIG.DESTINATION_DIR is None, mylar.CONFIG.DESTINATION_DIR == '']):
Expand Down
Loading

0 comments on commit f597463

Please sign in to comment.