+
+
+
+
+ CV removed |
+ Paused series |
+ Failed/Error |
+ Loading |
+
+
+
%def>
<%def name="headIncludes()">
@@ -37,7 +47,6 @@
diff --git a/mylar/__init__.py b/mylar/__init__.py
index 3b440e29..e3a3f45d 100644
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -795,7 +795,7 @@ def dbcheck():
except sqlite3.OperationalError:
logger.warn('Unable to update readinglist table to new storyarc table format.')
- c.execute('CREATE TABLE IF NOT EXISTS comics (ComicID TEXT UNIQUE, ComicName TEXT, ComicSortName TEXT, ComicYear TEXT, DateAdded TEXT, Status TEXT, IncludeExtras INTEGER, Have INTEGER, Total INTEGER, ComicImage TEXT, FirstImageSize INTEGER, ComicPublisher TEXT, PublisherImprint TEXT, ComicLocation TEXT, ComicPublished TEXT, NewPublish TEXT, LatestIssue TEXT, intLatestIssue INT, LatestDate TEXT, Description TEXT, DescriptionEdit TEXT, QUALalt_vers TEXT, QUALtype TEXT, QUALscanner TEXT, QUALquality TEXT, LastUpdated TEXT, AlternateSearch TEXT, UseFuzzy TEXT, ComicVersion TEXT, SortOrder INTEGER, DetailURL TEXT, ForceContinuing INTEGER, ComicName_Filesafe TEXT, AlternateFileName TEXT, ComicImageURL TEXT, ComicImageALTURL TEXT, DynamicComicName TEXT, AllowPacks TEXT, Type TEXT, Corrected_SeriesYear TEXT, Corrected_Type TEXT, TorrentID_32P TEXT, LatestIssueID TEXT, Collects CLOB, IgnoreType INTEGER, AgeRating TEXT, FilesUpdated TEXT, seriesjsonPresent INT, dirlocked INTEGER)')
+ c.execute('CREATE TABLE IF NOT EXISTS comics (ComicID TEXT UNIQUE, ComicName TEXT, ComicSortName TEXT, ComicYear TEXT, DateAdded TEXT, Status TEXT, IncludeExtras INTEGER, Have INTEGER, Total INTEGER, ComicImage TEXT, FirstImageSize INTEGER, ComicPublisher TEXT, PublisherImprint TEXT, ComicLocation TEXT, ComicPublished TEXT, NewPublish TEXT, LatestIssue TEXT, intLatestIssue INT, LatestDate TEXT, Description TEXT, DescriptionEdit TEXT, QUALalt_vers TEXT, QUALtype TEXT, QUALscanner TEXT, QUALquality TEXT, LastUpdated TEXT, AlternateSearch TEXT, UseFuzzy TEXT, ComicVersion TEXT, SortOrder INTEGER, DetailURL TEXT, ForceContinuing INTEGER, ComicName_Filesafe TEXT, AlternateFileName TEXT, ComicImageURL TEXT, ComicImageALTURL TEXT, DynamicComicName TEXT, AllowPacks TEXT, Type TEXT, Corrected_SeriesYear TEXT, Corrected_Type TEXT, TorrentID_32P TEXT, LatestIssueID TEXT, Collects CLOB, IgnoreType INTEGER, AgeRating TEXT, FilesUpdated TEXT, seriesjsonPresent INT, dirlocked INTEGER, cv_removed INTEGER)')
c.execute('CREATE TABLE IF NOT EXISTS issues (IssueID TEXT, ComicName TEXT, IssueName TEXT, Issue_Number TEXT, DateAdded TEXT, Status TEXT, Type TEXT, ComicID TEXT, ArtworkURL Text, ReleaseDate TEXT, Location TEXT, IssueDate TEXT, DigitalDate TEXT, Int_IssueNumber INT, ComicSize TEXT, AltIssueNumber TEXT, IssueDate_Edit TEXT, ImageURL TEXT, ImageURL_ALT TEXT, forced_file INT)')
c.execute('CREATE TABLE IF NOT EXISTS snatched (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Size INTEGER, DateAdded TEXT, Status TEXT, FolderName TEXT, ComicID TEXT, Provider TEXT, Hash TEXT, crc TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS upcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Status TEXT, DisplayComicName TEXT)')
@@ -1006,6 +1006,11 @@ def dbcheck():
except sqlite3.OperationalError:
c.execute('ALTER TABLE comics ADD COLUMN seriesjsonPresent INT')
+ try:
+ c.execute('SELECT cv_removed from comics')
+ except sqlite3.OperationalError:
+ c.execute('ALTER TABLE comics ADD COLUMN cv_removed INT')
+
try:
c.execute('SELECT DynamicComicName from comics')
if CONFIG.DYNAMIC_UPDATE < 3:
diff --git a/mylar/cv.py b/mylar/cv.py
index 93866342..5bc565b9 100755
--- a/mylar/cv.py
+++ b/mylar/cv.py
@@ -15,7 +15,7 @@
import re
import time
import pytz
-from mylar import logger, helpers
+from mylar import db, logger, helpers
import mylar
from bs4 import BeautifulSoup as Soup
from xml.parsers.expat import ExpatError
@@ -1479,3 +1479,28 @@ def basenum_mapping(ordinal=False):
'xi': '9'}
return basenums
+
+def check_that_biatch(comicid, oldinfo, newinfo):
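+ # compares the cached series values (oldinfo) against the freshly pulled CV data (newinfo);
+ # returns True when enough fields mismatch to assume the ComicID was removed/reassigned on CV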
+ failures = 0
+ if newinfo['ComicName'] is not None:
+ if newinfo['ComicName'].lower() != oldinfo['comicname'].lower():
+ failures +=1
+ if newinfo['ComicYear'] is not None:
+ if newinfo['ComicYear'] != oldinfo['comicyear']:
+ failures +=1
+ if newinfo['ComicPublisher'] is not None:
+ if newinfo['ComicPublisher'] != oldinfo['publisher']:
+ failures +=1
+ if newinfo['ComicURL'] is not None:
+ if newinfo['ComicURL'] != oldinfo['detailurl']:
+ failures +=1
+
+ if failures > 2:
+ # if > 50% failure (> 2/4 mismatches) assume removed...
+ logger.warn('[%s] Detected CV removing existing data for series [%s (%s)] and replacing it with [%s (%s)].'
+ ' This is a failure for this series and it will be paused until fixed manually' %
+ (failures, oldinfo['comicname'], oldinfo['comicyear'], newinfo['ComicName'], newinfo['ComicYear'])
+ )
+ return True
+
+ return False
diff --git a/mylar/helpers.py b/mylar/helpers.py
index 564403df..bf0cff15 100755
--- a/mylar/helpers.py
+++ b/mylar/helpers.py
@@ -1528,7 +1528,8 @@ def havetotals(refreshit=None):
try:
cpub = re.sub('(N)', '', comic['ComicPublished']).strip()
except Exception as e:
- logger.warn('[Error: %s] No Publisher found for %s - you probably want to Refresh the series when you get a chance.' % (e, comic['ComicName']))
+ if comic['cv_removed'] == 0:
+ logger.warn('[Error: %s] No Publisher found for %s - you probably want to Refresh the series when you get a chance.' % (e, comic['ComicName']))
cpub = None
comictype = comic['Type']
@@ -1553,6 +1554,9 @@ def havetotals(refreshit=None):
else:
comicImage = comic['ComicImage']
+ #cv_removed: 0 = series is present on CV
+ # 1 = series has been removed from CV
+ # 2 = series has been removed from CV but retaining what mylar has in its db
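+ # (1 is set by importer.addComictoDB when a mismatch is detected; 2 by webserve.fix_cv_removed when the series is kept)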
comics.append({"ComicID": comic['ComicID'],
"ComicName": comic['ComicName'],
@@ -1574,7 +1578,8 @@ def havetotals(refreshit=None):
"DateAdded": comic['LastUpdated'],
"Type": comic['Type'],
"Corrected_Type": comic['Corrected_Type'],
- "displaytype": comictype})
+ "displaytype": comictype,
+ "cv_removed": comic['cv_removed']})
return comics
def filesafe(comic):
diff --git a/mylar/importer.py b/mylar/importer.py
index e3350cc1..aa19e1f9 100644
--- a/mylar/importer.py
+++ b/mylar/importer.py
@@ -113,6 +113,11 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
return {'status': 'incomplete'}
oldcomversion = dbcomic['ComicVersion'] #store the comicversion and chk if it exists before hammering.
+ db_check_values = {'comicname': dbcomic['ComicName'],
+ 'comicyear': dbcomic['ComicYear'],
+ 'publisher': dbcomic['ComicPublisher'],
+ 'detailurl': dbcomic['DetailURL'],
+ 'total_count': dbcomic['Total']}
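+ # snapshot of the existing db values - compared against the fresh CV data below to detect a removed/reassigned ComicID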
if dbcomic is None or bypass is False:
newValueDict = {"ComicName": "Comic ID: %s" % (comicid),
@@ -131,6 +136,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
aliases = None
FirstImageSize = 0
old_description = None
+ db_check_values = None
myDB.upsert("comics", newValueDict, controlValueDict)
@@ -159,6 +165,17 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
else:
sortname = comic['ComicName']
+ if db_check_values is not None:
+ if comic['ComicURL'] != db_check_values['detailurl']:
+ logger.warn('[CORRUPT-COMICID-DETECTION-ENABLED] ComicID may have been removed from CV'
+ ' and replaced with an entirely different series/volume. Checking some values'
+ ' to make sure before proceeding...'
+ )
+ i_choose_violence = cv.check_that_biatch(comicid, db_check_values, comic)
+ if i_choose_violence:
+ myDB.upsert("comics", {'Status': 'Paused', 'cv_removed': 1}, {'ComicID': comicid})
+ return {'status': 'incomplete'}
+
comic['Corrected_Type'] = fixed_type
if fixed_type is not None and fixed_type != comic['Type']:
logger.info('Forced Comic Type to : %s' % comic['Corrected_Type'])
diff --git a/mylar/webserve.py b/mylar/webserve.py
index 7398e8fb..116e357d 100644
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -274,11 +274,23 @@ def config_check(self):
def home(self, **kwargs):
if mylar.START_UP is True:
self.config_check()
+ # pass the proper table colors here
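+ # (these map to the CV removed / Paused / Failed / Loading legend entries on the index pages, per interface theme)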
+ if mylar.CONFIG.INTERFACE == 'default':
+ legend_colors = {'paused': '#f9cbe6',
+ 'removed': '#ddd',
+ 'loading': '#ebf5ff',
+ 'failed': '#ffdddd'}
+ else:
+ legend_colors = {'paused': '#bd915a',
+ 'removed': '#382f64',
+ 'loading': '#1c5188',
+ 'failed': '#641716'}
+
if mylar.CONFIG.ALPHAINDEX == True:
comics = helpers.havetotals()
- return serve_template(templatename="index-alphaindex.html", title="Home", comics=comics, alphaindex=mylar.CONFIG.ALPHAINDEX)
+ return serve_template(templatename="index-alphaindex.html", title="Home", comics=comics, alphaindex=mylar.CONFIG.ALPHAINDEX, legend_colors=legend_colors)
else:
- return serve_template(templatename="index.html", title="Home")
+ return serve_template(templatename="index.html", title="Home", legend_colors=legend_colors)
home.exposed = True
def loadhome(self, iDisplayStart=0, iDisplayLength=100, iSortCol_0=5, sSortDir_0="desc", sSearch="", **kwargs):
@@ -366,7 +378,7 @@ def loadhome(self, iDisplayStart=0, iDisplayLength=100, iSortCol_0=5, sSortDir_0
rows = filtered[iDisplayStart:(iDisplayStart + iDisplayLength)]
else:
rows = filtered
- rows = [[row['ComicPublisher'], row['ComicName'], row['ComicYear'], row['LatestIssue'], row['LatestDate'], row['recentstatus'], row['Status'], row['percent'], row['haveissues'], row['totalissues'], row['ComicID'], row['displaytype'], row['ComicVolume']] for row in rows]
+ rows = [[row['ComicPublisher'], row['ComicName'], row['ComicYear'], row['LatestIssue'], row['LatestDate'], row['recentstatus'], row['Status'], row['percent'], row['haveissues'], row['totalissues'], row['ComicID'], row['displaytype'], row['ComicVolume'], row['cv_removed']] for row in rows]
return json.dumps({
'iTotalDisplayRecords': len(filtered),
'iTotalRecords': len(resultlist),
@@ -9315,3 +9327,72 @@ def return_checks(self):
return json.dumps(mylar.REQS)
return_checks.exposed = True
+
+ def fix_cv_removed(self, comicid, opts, delete_dir=False):
+ if delete_dir == 1:
+ delete_dir = True
+ else:
+ delete_dir = False
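+ # opts == 'delete' removes the series (and optionally its directory) entirely; any other value keeps the db entry, pauses it and sets cv_removed=2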
+ myDB = db.DBConnection()
+ cc = myDB.selectone('SELECT comicname, comicyear, comiclocation from comics where ComicID=?', [comicid]).fetchone()
+ if not cc:
+ logger.warn('[CV-REMOVAL-DETECTION] Unable to locate comicid in db - this series does not currently exist.')
+ return json.dumps({'status': 'failure', 'message': 'Series has already been removed from the watchlist'})
+ comicname = cc['comicname']
+ comicyear = cc['comicyear']
+ seriesdir = cc['comiclocation']
+
+ if opts == 'delete':
+
+ myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
+ myDB.action("DELETE FROM issues WHERE ComicID=?", [comicid])
+ if mylar.CONFIG.ANNUALS_ON:
+ myDB.action("DELETE FROM annuals WHERE ComicID=?", [comicid])
+ myDB.action('DELETE from upcoming WHERE ComicID=?', [comicid])
+ myDB.action('DELETE from readlist WHERE ComicID=?', [comicid])
+ myDB.action('UPDATE weekly SET Status="Skipped" WHERE ComicID=? AND Status="Wanted"', [comicid])
+ warnings = 0
+ if delete_dir:
+ logger.fdebug('Remove directory on series removal enabled.')
+ if seriesdir is not None:
+ if os.path.exists(seriesdir):
+ logger.fdebug('Attempting to remove the directory and contents of : %s' % seriesdir)
+ try:
+ shutil.rmtree(seriesdir)
+ except:
+ logger.warn('Unable to remove directory after removing series from Mylar.')
+ warnings += 1
+ else:
+ logger.info('Successfully removed directory: %s' % (seriesdir))
+ else:
+ logger.warn('Unable to remove directory as it does not exist: %s' % seriesdir)
+ warnings += 1
+ else:
+ logger.warn('Unable to remove directory as no series location is set.')
+ warnings += 1
+
+ helpers.ComicSort(sequence='update')
+
+ c_image = os.path.join(mylar.CONFIG.CACHE_DIR, comicid + '.jpg')
+ if os.path.exists(c_image):
+ try:
+ os.remove(c_image)
+ except Exception as e:
+ warnings += 1
+ logger.warn('[CV-REMOVAL-DETECTION] Unable to remove the image file from the cache (%s) [error: %s].'
+ ' You may have to delete the file manually' % (c_image, e))
+ else:
+ logger.fdebug('image file already removed from cache for %s (%s) - [%s]' % (comicname, comicyear, comicid))
+
+ linemsg = 'Successfully removed %s (%s) from the watchlist' % (comicname, comicyear)
+ if warnings > 0:
+ linemsg += ' [%s warnings]' % warnings
+
+ return json.dumps({'status': 'success', 'message': linemsg})
+
+ else:
+ myDB.upsert("comics", {'Status': 'Paused', 'cv_removed': 2}, {'ComicID': comicid})
+ logger.info('[CV-REMOVAL-DETECTION] Successfully retained %s (%s) - it is now in a Paused status' % (comicname, comicyear))
+ linemsg = 'Successfully Paused %s (%s) due to CV removal' % (comicname, comicyear)
+ return json.dumps({'status': 'success', 'message': linemsg})
+ fix_cv_removed.exposed = True
From 4bbf9ab02fb8b35427e61592f9ee59a61bdd2920 Mon Sep 17 00:00:00 2001
From: evilhero <909424+evilhero@users.noreply.github.com>
Date: Fri, 16 Feb 2024 00:45:52 -0500
Subject: [PATCH 23/32] bunch of late night fixes...(#1510)(#1507) and some
more
---
data/interfaces/default/base.html | 6 +++---
mylar/__init__.py | 2 ++
mylar/filechecker.py | 21 ++++++++++++---------
mylar/helpers.py | 16 ++++++++++++++--
mylar/importer.py | 19 ++++++++++++++++---
mylar/updater.py | 2 +-
mylar/webserve.py | 1 +
7 files changed, 49 insertions(+), 18 deletions(-)
diff --git a/data/interfaces/default/base.html b/data/interfaces/default/base.html
index 04381f14..00762de7 100755
--- a/data/interfaces/default/base.html
+++ b/data/interfaces/default/base.html
@@ -354,7 +354,7 @@
if (data.status == 'success'){
$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
console.log('data.comicid:'+data.comicid)
- if ( (data.tables == 'both' || data.tables == 'tables') && ( document.body.innerHTML.search(data.comicid) || tt.value == 'history' || tt.value == 'search_results') ){
+ if ( ( tt.value != "config" ) && (data.tables == 'both' || data.tables == 'tables') && ( document.body.innerHTML.search(data.comicid) || tt.value == "history" || tt.value == "search_results") ){
console.log('reloading table1...');
reload_table();
}
@@ -362,7 +362,7 @@
console.log('reloading table2...');
reload_table();
}
- if (data.tables == 'both' || data.tables == 'tabs'){
+ if ( (data.tables == 'both' || data.tables == 'tabs') && ( tt.value != "config") ) {
reload_tabs();
}
if( data.comicid == cid && document.getElementById("page_name").value == 'series_detail'){
@@ -424,7 +424,7 @@
var tables = $('table.display').DataTable();
var tt = document.getElementById("page_name");
if(typeof(tt) != 'undefined' && tt != null){
- if (tt.value != "weekly" && tt.value != "import_results" && tt.value != "manage_comics" && tt.value != "manage_issues" && tt.value != "manage_failed" && tt.value != "reading_list" && tt.value != "storyarcs_index" && tt.value != "storyarc_detail") {
+ if (tt.value != "weekly" && tt.value != "import_results" && tt.value != "manage_comics" && tt.value != "manage_issues" && tt.value != "manage_failed" && tt.value != "reading_list" && tt.value != "storyarcs_index" && tt.value != "storyarc_detail" && tt.value != "config") {
// this is required so it doesn't error if on the weekly page
// once weekly & other pages are converted to dynamic loading, this if can be removed
tables.ajax.reload(null,false);
diff --git a/mylar/__init__.py b/mylar/__init__.py
index e3a3f45d..234a2d14 100644
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -179,6 +179,8 @@
'ALPHA',
'OMEGA',
'BLACK',
+ 'DARK',
+ 'LIGHT',
'AU',
'AI',
'INH',
diff --git a/mylar/filechecker.py b/mylar/filechecker.py
index f275b9e8..62c56693 100755
--- a/mylar/filechecker.py
+++ b/mylar/filechecker.py
@@ -786,15 +786,18 @@ def parseit(self, path, filename, subpath=None):
lastissue_label = sf
lastissue_mod_position = file_length
elif x > 0:
- logger.fdebug('I have encountered a decimal issue #: %s' % sf)
- possible_issuenumbers.append({'number': sf,
- 'position': split_file.index(sf, lastissue_position), #modfilename.find(sf)})
- 'mod_position': self.char_file_position(modfilename, sf, lastmod_position),
- 'validcountchk': validcountchk})
+ if x == float('inf') and split_file.index(sf, lastissue_position) <= 2:
+ logger.fdebug('infinity wording detected - position places it within series title boundaries..')
+ else:
+ logger.fdebug('I have encountered a decimal issue #: %s' % sf)
+ possible_issuenumbers.append({'number': sf,
+ 'position': split_file.index(sf, lastissue_position), #modfilename.find(sf)})
+ 'mod_position': self.char_file_position(modfilename, sf, lastmod_position),
+ 'validcountchk': validcountchk})
- lastissue_position = split_file.index(sf, lastissue_position)
- lastissue_label = sf
- lastissue_mod_position = file_length
+ lastissue_position = split_file.index(sf, lastissue_position)
+ lastissue_label = sf
+ lastissue_mod_position = file_length
else:
raise ValueError
except ValueError as e:
@@ -1501,7 +1504,7 @@ def matchIT(self, series_info):
if qmatch_chk is None:
qmatch_chk = 'match'
if qmatch_chk is not None:
- #logger.fdebug('[MATCH: ' + series_info['series_name'] + '] ' + filename)
+ #logger.fdebug('[%s][MATCH: %s][seriesALT: %s] %s' % (qmatch_chk, seriesalt, series_info['series_name'], filename))
enable_annual = False
annual_comicid = None
if any(re.sub('[\|\s]','', x.lower()).strip() == re.sub('[\|\s]','', nspace_seriesname.lower()).strip() for x in self.AS_Alt):
diff --git a/mylar/helpers.py b/mylar/helpers.py
index bf0cff15..2f2d58e4 100755
--- a/mylar/helpers.py
+++ b/mylar/helpers.py
@@ -1116,8 +1116,20 @@ def issuedigits(issnum):
try:
int_issnum = (int(issb4dec) * 1000) + (int(issaftdec) * 10)
except ValueError:
- #logger.fdebug('This has no issue # for me to get - Either a Graphic Novel or one-shot.')
- int_issnum = 999999999999999
+ try:
+ ordtot = 0
+ if any(ext == issaftdec.upper() for ext in mylar.ISSUE_EXCEPTIONS):
+ inu = 0
+ while (inu < len(issaftdec)):
+ ordtot += ord(issaftdec[inu].lower()) #lower-case the letters for simplicity
+ inu+=1
+ int_issnum = (int(issb4dec) * 1000) + ordtot
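+ # e.g. an exception issue like '2.AU' -> (2 * 1000) + ord('a') + ord('u') = 2214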
+ except Exception as e:
+ logger.warn('error: %s' % e)
+ ordtot = 0
+ if ordtot == 0:
+ #logger.error('This has no issue # for me to get - Either a Graphic Novel or one-shot.')
+ int_issnum = 999999999999999
elif all([ '[' in issnum, ']' in issnum ]):
issnum_tmp = issnum.find('[')
int_issnum = int(issnum[:issnum_tmp].strip()) * 1000
diff --git a/mylar/importer.py b/mylar/importer.py
index aa19e1f9..33de4227 100644
--- a/mylar/importer.py
+++ b/mylar/importer.py
@@ -1384,9 +1384,22 @@ def updateissuedata(comicid, comicname=None, issued=None, comicIssues=None, call
#int_issnum = str(issnum)
int_issnum = (int(issb4dec) * 1000) + (int(issaftdec) * 10)
except ValueError:
- logger.error('This has no issue # for me to get - Either a Graphic Novel or one-shot.')
- updater.no_searchresults(comicid)
- return {'status': 'failure'}
+ try:
+ ordtot = 0
+ if any(ext == issaftdec.upper() for ext in mylar.ISSUE_EXCEPTIONS):
+ logger.fdebug('issue_exception detected..')
+ inu = 0
+ while (inu < len(issaftdec)):
+ ordtot += ord(issaftdec[inu].lower()) #lower-case the letters for simplicity
+ inu+=1
+ int_issnum = (int(issb4dec) * 1000) + ordtot
+ except Exception as e:
+ logger.warn('error: %s' % e)
+ ordtot = 0
+ if ordtot == 0:
+ logger.error('This has no issue # for me to get - Either a Graphic Novel or one-shot.')
+ updater.no_searchresults(comicid)
+ return {'status': 'failure'}
elif all([ '[' in issnum, ']' in issnum ]):
issnum_tmp = issnum.find('[')
int_issnum = int(issnum[:issnum_tmp].strip()) * 1000
diff --git a/mylar/updater.py b/mylar/updater.py
index 4852d30e..1f8e9b5a 100755
--- a/mylar/updater.py
+++ b/mylar/updater.py
@@ -1787,7 +1787,7 @@ def forceRescan(ComicID, archive=None, module=None, recheck=False):
if pause_status:
issStatus = old_status
- logger.fdefbug('[PAUSE_ANNUAL_CHECK_STATUS_CHECK] series is paused, keeping status of %s for issue #%s' % (issStatus, chk['Issue_Number']))
+ logger.fdebug('[PAUSE_ANNUAL_CHECK_STATUS_CHECK] series is paused, keeping status of %s for issue #%s' % (issStatus, chk['Issue_Number']))
else:
#if old_status == "Skipped":
# if mylar.CONFIG.AUTOWANT_ALL:
diff --git a/mylar/webserve.py b/mylar/webserve.py
index 116e357d..ca296857 100644
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -431,6 +431,7 @@ def comicDetails(self, ComicID, addbyid=0, **kwargs):
'ComicLocation': None,
'AlternateSearch': None,
'AlternateFileName': None,
+ 'cv_removed': 0,
'DetailURL': 'https://comicvine.com/volume/4050-%s' % ComicID}
else:
secondary_folders = None
From e63b4b496b0b71767ccdee7f4beb1322651a70b2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Esteban=20S=C3=A1nchez?=
Date: Mon, 19 Feb 2024 12:49:48 +0100
Subject: [PATCH 24/32] Fixed typo
---
data/interfaces/default/comicdetails_update.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/data/interfaces/default/comicdetails_update.html b/data/interfaces/default/comicdetails_update.html
index 3ea23230..88794a1d 100755
--- a/data/interfaces/default/comicdetails_update.html
+++ b/data/interfaces/default/comicdetails_update.html
@@ -417,7 +417,7 @@
- Use this insetad of CV name during post-processing / renaming
+ Use this instead of CV name during post-processing / renaming