desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
@cherrypy.expose
def builder(self):
    """Expose the api-builder template.

    Renders apiBuilder.tmpl with a sorted (truncated to 100) show list plus
    the distinct seasons/episodes per show so the page can build API URLs.
    """
    t = webserve.PageTemplate(file='apiBuilder.tmpl')

    def titler(x):
        # Strip a leading article ("a"/"an"/"the") for natural sorting,
        # but keep "A to ..." style titles intact.
        if not x:
            return x
        if (not x.lower().startswith('a to ')) and x.lower().startswith('a '):
            x = x[2:]
        elif x.lower().startswith('an '):
            x = x[3:]
        elif x.lower().startswith('the '):
            x = x[4:]
        return x

    # key= sort instead of a cmp callable: identical ordering, faster, and
    # forward-compatible (the positional cmp argument was removed in Python 3).
    t.sortedShowList = sorted(sickbeard.showList, key=lambda show: titler(show.name))[0:100]

    myDB = db.DBConnection(row_type='dict')
    seasonSQLResults = {}
    episodeSQLResults = {}
    for curShow in t.sortedShowList:
        seasonSQLResults[curShow.tvdbid] = myDB.select(
            'SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC',
            [curShow.tvdbid])
    for curShow in t.sortedShowList:
        episodeSQLResults[curShow.tvdbid] = myDB.select(
            'SELECT DISTINCT season,episode FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC',
            [curShow.tvdbid])
    t.seasonSQLResults = seasonSQLResults
    t.episodeSQLResults = episodeSQLResults
    myDB.connection.close()

    # Only expose the key when it looks like a real generated one (32 chars).
    if len(sickbeard.API_KEY) == 32:
        t.apikey = sickbeard.API_KEY
    else:
        t.apikey = 'api key not generated'
    return webserve._munge(t)
|
def _out_as_json(self, dict):
    """Serialize *dict* to JSON and set the cherrypy response content type.

    Honors a JSONP-style ``callback``/``jsonp`` request parameter by wrapping
    the output in a function call. On serialization failure an error payload
    is returned instead of raising.
    """
    response = cherrypy.response
    request = cherrypy.request
    response.headers['Content-Type'] = 'application/json;charset=UTF-8'
    try:
        # NOTE(review): the attribute is spelled "intent" project-wide — do
        # not "fix" to indent without renaming the class attribute too.
        out = json.dumps(dict, indent=self.intent, sort_keys=True)
        callback = request.params.get('callback') or request.params.get('jsonp')
        if callback is not None:
            out = callback + '(' + out + ');'  # wrap for JSONP consumers
    except Exception as e:
        logger.log(u'API :: ' + traceback.format_exc(), logger.DEBUG)
        # BUGFIX: the closing quote used to sit before the exception text,
        # producing invalid JSON; the message is now quoted correctly.
        out = ('{"result":"' + result_type_map[RESULT_ERROR]
               + '", "message": "error while composing output: ' + ex(e) + '"}')
    return out
|
def _grand_access(self, realKey, args, kwargs):
    """Validate the api key and log the result.

    The key is taken from the ``apikey`` keyword argument, falling back to
    the first positional argument. Returns a tuple of
    (access_granted, log_message, remaining_args, kwargs).
    """
    remoteIp = cherrypy.request.remote.ip
    apiKey = kwargs.get('apikey', None)
    if apiKey:
        # key arrived as a keyword param; strip it before passing kwargs on
        del kwargs['apikey']
    elif args:
        # fall back to the first positional argument as the key
        apiKey, args = args[0], args[1:]

    if sickbeard.USE_API is not True:
        return (False, (u'API :: ' + remoteIp) + ' - SB API Disabled. ACCESS DENIED', args, kwargs)
    if apiKey == realKey:
        return (True, (u'API :: ' + remoteIp) + ' - gave correct API KEY. ACCESS GRANTED', args, kwargs)
    if not apiKey:
        return (False, (u'API :: ' + remoteIp) + ' - gave NO API KEY. ACCESS DENIED', args, kwargs)
    return (False, (u'API :: ' + remoteIp) + ' - gave WRONG API KEY ' + apiKey + '. ACCESS DENIED', args, kwargs)
|
def check_params(self, args, kwargs, key, default, required, type, allowedValues):
    """Check passed params for the shorthand wrapper and detect
    missing/required params.

    Resolves the value for *key* from positional *args* (shorthand) or
    *kwargs* (explicit, wins), records required/optional param metadata on
    self for the help output, converts/validates the value, and returns
    (value, remaining_args).
    """
    # True until the param is found positionally or by keyword.
    missing = True
    orgDefault = default  # remember the declared default for the help metadata
    if (type == 'bool'):
        allowedValues = [0, 1]
    if args:
        # positional shorthand: consume the first remaining arg
        default = args[0]
        missing = False
        args = args[1:]
    if kwargs.get(key):
        # explicit keyword overrides the positional shorthand
        default = kwargs.get(key)
        missing = False
    if required:
        # self._missing / self._requiredParams are created lazily on first use.
        # NOTE(review): the try-branch calls .append() on _requiredParams while
        # the except-branch creates it as a dict — for the second required param
        # that .append() raises AttributeError and re-runs the init branch,
        # resetting _missing; confirm against the expected help output.
        try:
            self._missing
            self._requiredParams.append(key)
        except AttributeError:
            self._missing = []
            self._requiredParams = {}
            self._requiredParams[key] = {'allowedValues': allowedValues, 'defaultValue': orgDefault}
        if (missing and (key not in self._missing)):
            self._missing.append(key)
    else:
        try:
            self._optionalParams[key] = {'allowedValues': allowedValues, 'defaultValue': orgDefault}
        except AttributeError:
            # first optional param: create the registry, then record it
            self._optionalParams = {}
            self._optionalParams[key] = {'allowedValues': allowedValues, 'defaultValue': orgDefault}
    if default:
        # only convert/range-check when a (truthy) value is present
        default = self._check_param_type(default, key, type)
        if (type == 'bool'):
            # NOTE(review): clearing type here has no visible further effect
            # in this block — presumably legacy; confirm before removing.
            type = []
        self._check_param_value(default, key, allowedValues)
    return (default, args)
|
'checks if value can be converted / parsed to type
will raise an error on failure
or will convert it to type and return new converted value
can check for:
- int: will be converted into int
- bool: will be converted to False / True
- list: will always return a list
- string: will do nothing for now
- ignore: will ignore it, just like "string"'
| def _check_param_type(self, value, name, type):
| error = False
if (type == 'int'):
if _is_int(value):
value = int(value)
else:
error = True
elif (type == 'bool'):
if (value in ('0', '1')):
value = bool(int(value))
elif (value in ('true', 'True', 'TRUE')):
value = True
elif (value in ('false', 'False', 'FALSE')):
value = False
else:
error = True
elif (type == 'list'):
value = value.split('|')
elif (type == 'string'):
pass
elif (type == 'ignore'):
pass
else:
logger.log(((u'API :: Invalid param type set ' + str(type)) + ' can not check or convert ignoring it'), logger.ERROR)
if error:
raise ApiError(((((((u"param: '" + str(name)) + "' with given value: '") + str(value)) + "' could not be parsed into '") + str(type)) + "'"))
return value
|
'will check if value (or all values in it ) are in allowed values
will raise an exception if value is "out of range"
if bool(allowedValue) == False a check is not performed and all values are excepted'
| def _check_param_value(self, value, name, allowedValues):
| if allowedValues:
error = False
if isinstance(value, list):
for item in value:
if (not (item in allowedValues)):
error = True
elif (not (value in allowedValues)):
error = True
if error:
raise ApiError(((((((u"param: '" + str(name)) + "' with given value: '") + str(value)) + "' is out of allowed range '") + str(allowedValues)) + "'"))
|
def run(self):
    """Internal function wrapper: dispatch to the matching CMD_* handler.

    Episode requests take priority over season requests, which take
    priority over plain show requests.
    """
    args = (self.sid,) + self.origArgs
    if self.e:
        handler = CMD_Episode
    elif self.s:
        handler = CMD_ShowSeasons
    else:
        handler = CMD_Show
    return handler(args, self.kwargs).run()
|
def run(self):
    """Display help information for a given subject/command."""
    if self.subject not in _functionMaper:
        return _responds(RESULT_FAILURE, msg='No such cmd')
    # instantiate the mapped command with help=1 so it returns its own help
    help_payload = _functionMaper.get(self.subject)((), {'help': 1}).run()
    return _responds(RESULT_SUCCESS, help_payload)
|
def run(self):
    """Display the coming episodes.

    Three query passes feed one list: episodes airing within the next week,
    the single next airing episode for shows with nothing in that window,
    and recently-aired episodes still marked WANTED (missed). Episodes are
    then bucketed under missed/today/soon/later.
    """
    today = datetime.date.today().toordinal()
    next_week = (datetime.date.today() + datetime.timedelta(days=7)).toordinal()
    recently = (datetime.date.today() - datetime.timedelta(days=3)).toordinal()
    done_show_list = []
    # composite statuses meaning "already have it or don't want it"
    qualList = ((Quality.DOWNLOADED + Quality.SNATCHED) + [ARCHIVED, IGNORED])
    myDB = db.DBConnection(row_type='dict')
    # pass 1: episodes with today <= airdate < next_week
    sql_results = myDB.select((("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season > 0 AND airdate >= ? AND airdate < ? AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join((['?'] * len(qualList)))) + ')'), ([today, next_week] + qualList))
    for cur_result in sql_results:
        done_show_list.append(int(cur_result['tvdbid']))
    # pass 2: for each show NOT covered above, fetch its next airing episode
    more_sql_results = myDB.select((((("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes outer_eps, tv_shows WHERE season > 0 AND showid NOT IN (" + ','.join((['?'] * len(done_show_list)))) + ') AND tv_shows.tvdb_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season > 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (') + ','.join((['?'] * len((Quality.DOWNLOADED + Quality.SNATCHED))))) + ')'), (((done_show_list + [next_week]) + Quality.DOWNLOADED) + Quality.SNATCHED))
    sql_results += more_sql_results
    # pass 3: aired within the last 3 days and still WANTED (i.e. missed)
    more_sql_results = myDB.select((("SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'tvdbid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season > 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join((['?'] * len(qualList)))) + ')'), ([today, recently, WANTED] + qualList))
    sql_results += more_sql_results
    # cmp-style comparators (Python 2 sort API); chosen via the 'sort' param
    sorts = {'date': (lambda x, y: cmp(int(x['airdate']), int(y['airdate']))), 'show': (lambda a, b: cmp(a['show_name'], b['show_name'])), 'network': (lambda a, b: cmp(a['network'], b['network']))}
    sql_results.sort(sorts[self.sort])
    finalEpResults = {}
    # pre-create the buckets the caller asked for
    for curType in self.type:
        finalEpResults[curType] = []
    for ep in sql_results:
        '\n Missed: yesterday... (less than 1week)\n Today: today\n Soon: tomorrow till next week\n Later: later than next week\n '
        # skip paused shows unless the caller explicitly asked for them
        if (ep['paused'] and (not self.paused)):
            continue
        status = 'soon'
        if (ep['airdate'] < today):
            status = 'missed'
        elif (ep['airdate'] >= next_week):
            status = 'later'
        elif ((ep['airdate'] >= today) and (ep['airdate'] < next_week)):
            if (ep['airdate'] == today):
                status = 'today'
            else:
                status = 'soon'
        # drop categories the caller did not request
        if ((self.type is not None) and (not (status in self.type))):
            continue
        ordinalAirdate = int(ep['airdate'])
        if (not ep['network']):
            ep['network'] = ''
        ep['airdate'] = _ordinal_to_dateForm(ordinalAirdate)
        ep['quality'] = _get_quality_string(ep['quality'])
        # normalize "8pm" -> "8 PM" style air times
        # NOTE(review): the last replace swaps a space for a space (no-op);
        # presumably it collapsed a double space originally — confirm.
        ep['airs'] = str(ep['airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
        # ISO-style weekday: Monday == 1
        ep['weekday'] = (1 + datetime.date.fromordinal(ordinalAirdate).weekday())
        # create buckets on demand for categories outside self.type
        if (not (status in finalEpResults)):
            finalEpResults[status] = []
        finalEpResults[status].append(ep)
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, finalEpResults)
|
def run(self):
    """Display detailed info about an episode.

    Looks up the episode row by show/season/episode, strips the show's
    base path from the file location unless fullPath was requested, and
    humanizes airdate, status, quality and file size.
    """
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if (not showObj):
        return _responds(RESULT_FAILURE, msg='Show not found')
    myDB = db.DBConnection(row_type='dict')
    sqlResults = myDB.select('SELECT name, description, airdate, status, location, file_size, release_name FROM tv_episodes WHERE showid = ? AND episode = ? AND season = ?', [self.tvdbid, self.e, self.s])
    if (not (len(sqlResults) == 1)):
        raise ApiError('Episode not found')
    episode = sqlResults[0]
    showPath = None
    try:
        showPath = showObj.location
    except sickbeard.exceptions.ShowDirNotFoundException:
        # show directory is missing on disk; location reported empty below
        pass
    if ((bool(self.fullPath) is True) and showPath):
        pass  # caller wants the absolute path: leave location untouched
    elif ((bool(self.fullPath) is False) and showPath):
        # make the location relative to the show dir (+1 drops the separator)
        showPathLength = (len(showPath) + 1)
        episode['location'] = episode['location'][showPathLength:]
    elif (not showPath):
        episode['location'] = ''
    episode['airdate'] = _ordinal_to_dateForm(episode['airdate'])
    # split the composite status into its status and quality parts
    (status, quality) = Quality.splitCompositeStatus(int(episode['status']))
    episode['status'] = _get_status_Strings(status)
    episode['quality'] = _get_quality_string(quality)
    if episode['file_size']:
        episode['file_size_human'] = _sizeof_fmt(episode['file_size'])
    else:
        episode['file_size_human'] = ''
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, episode)
|
def run(self):
    """Search for an episode.

    Queues a manual search for the episode and blocks, polling once per
    second, until the queue item reports success or failure.
    """
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if (not showObj):
        return _responds(RESULT_FAILURE, msg='Show not found')
    epObj = showObj.getEpisode(int(self.s), int(self.e))
    if isinstance(epObj, str):
        # getEpisode returns an error string when the episode is unknown
        return _responds(RESULT_FAILURE, msg='Episode not found')
    ep_queue_item = search_queue.ManualSearchQueueItem(epObj)
    sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)
    # NOTE(review): busy-waits with no timeout; a stuck queue item blocks
    # this request forever — confirm the queue always sets success.
    while (ep_queue_item.success is None):
        time.sleep(1)
    if ep_queue_item.success:
        (status, quality) = Quality.splitCompositeStatus(epObj.status)
        return _responds(RESULT_SUCCESS, {'quality': _get_quality_string(quality)}, (('Snatched (' + _get_quality_string(quality)) + ')'))
    return _responds(RESULT_FAILURE, msg='Unable to find episode')
|
def run(self):
    """Set status of an episode or a season (when no ep is provided).

    Translates the status string to its numeric code, refuses UNAIRED
    episodes and (without force) already-DOWNLOADED ones, saves the new
    status, and starts a backlog search when anything was set to WANTED.
    """
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if (not showObj):
        return _responds(RESULT_FAILURE, msg='Show not found')
    # map the human status string onto its numeric code (for/else: raise if no match)
    for status in statusStrings.statusStrings:
        if (str(statusStrings[status]).lower() == str(self.status).lower()):
            self.status = status
            break
    else:
        raise ApiError('The status string could not be matched to a status. Report to Devs!')
    ep_list = []
    if self.e:
        epObj = showObj.getEpisode(self.s, self.e)
        if (epObj is None):
            return _responds(RESULT_FAILURE, msg='Episode not found')
        ep_list = [epObj]
    else:
        # no episode given: operate on the whole season
        ep_list = showObj.getAllEpisodes(season=self.s)

    def _epResult(result_code, ep, msg=''):
        # shape one per-episode entry of the response payload
        return {'season': ep.season, 'episode': ep.episode, 'status': _get_status_Strings(ep.status), 'result': result_type_map[result_code], 'message': msg}
    ep_results = []
    failure = False
    start_backlog = False
    ep_segment = None
    for epObj in ep_list:
        if ((ep_segment is None) and (self.status == WANTED)):
            # remember the backlog segment (month for air-by-date, else season)
            if showObj.air_by_date:
                ep_segment = str(epObj.airdate)[:7]
            else:
                ep_segment = epObj.season
        with epObj.lock:
            if (epObj.status == UNAIRED):
                # only report a failure for single-episode requests;
                # season sweeps skip unaired episodes silently
                if (self.e is not None):
                    ep_results.append(_epResult(RESULT_FAILURE, epObj, 'Refusing to change status because it is UNAIRED'))
                    failure = True
                continue
            if ((epObj.status in Quality.DOWNLOADED) and (not self.force)):
                ep_results.append(_epResult(RESULT_FAILURE, epObj, 'Refusing to change status because it is already marked as DOWNLOADED'))
                failure = True
                continue
            epObj.status = self.status
            epObj.saveToDB()
            if (self.status == WANTED):
                start_backlog = True
            ep_results.append(_epResult(RESULT_SUCCESS, epObj))
    extra_msg = ''
    if start_backlog:
        cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, ep_segment)
        sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)
        logger.log(((((u'API :: Starting backlog for ' + showObj.name) + ' season ') + str(ep_segment)) + ' because some episodes were set to WANTED'))
        extra_msg = ' Backlog started'
    if failure:
        return _responds(RESULT_FAILURE, ep_results, ('Failed to set all or some status. Check data.' + extra_msg))
    else:
        return _responds(RESULT_SUCCESS, msg=('All status set successfully.' + extra_msg))
|
def run(self):
    """Display scene exceptions for all shows, or for a single one.

    Without a tvdbid: returns a dict mapping tvdbid -> list of exception
    names. With a tvdbid: returns the list of exception names for that show.
    """
    myDB = db.DBConnection('cache.db', row_type='dict')
    if self.tvdbid is None:
        rows = myDB.select("SELECT show_name, tvdb_id AS 'tvdbid' FROM scene_exceptions")
        scene_exceptions = {}
        for row in rows:
            scene_exceptions.setdefault(row['tvdbid'], []).append(row['show_name'])
    else:
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg='Show not found')
        rows = myDB.select("SELECT show_name, tvdb_id AS 'tvdbid' FROM scene_exceptions WHERE tvdb_id = ?", [self.tvdbid])
        scene_exceptions = [row['show_name'] for row in rows]
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, scene_exceptions)
|
def run(self):
    """Display sickbeard downloaded/snatched history.

    Filters by the requested type (downloaded/snatched/both), limits to at
    most 100 rows, and humanizes status/quality/date per row.
    """
    if self.type == 'downloaded':
        self.type = 'Downloaded'
        typeCodes = Quality.DOWNLOADED
    elif self.type == 'snatched':
        self.type = 'Snatched'
        typeCodes = Quality.SNATCHED
    else:
        typeCodes = Quality.SNATCHED + Quality.DOWNLOADED
    myDB = db.DBConnection(row_type='dict')
    ulimit = min(int(self.limit), 100)  # hard cap at 100 rows
    placeholders = ','.join(['?'] * len(typeCodes))
    base_sql = ('SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.tvdb_id AND action in ('
                + placeholders + ') ORDER BY date DESC')
    if ulimit == 0:
        sqlResults = myDB.select(base_sql, typeCodes)
    else:
        sqlResults = myDB.select(base_sql + ' LIMIT ?', typeCodes + [ulimit])
    results = []
    for row in sqlResults:
        status, quality = Quality.splitCompositeStatus(int(row['action']))
        status = _get_status_Strings(status)
        if self.type and status != self.type:
            continue
        row['status'] = status
        row['quality'] = _get_quality_string(quality)
        row['date'] = _historyDate_to_dateTimeForm(str(row['date']))
        del row['action']
        _rename_element(row, 'showid', 'tvdbid')
        # split the resource into directory and file name
        row['resource_path'] = os.path.dirname(row['resource'])
        row['resource'] = os.path.basename(row['resource'])
        results.append(row)
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, results)
|
def run(self):
    """Clear sickbeard's history entirely."""
    myDB = db.DBConnection()
    # delete every row, then compact the database file
    myDB.action('DELETE FROM history WHERE 1=1')
    myDB.action('VACUUM')
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, msg='History cleared')
|
def run(self):
    """Trim sickbeard's history: remove entries older than 30 days."""
    cutoff = (datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat)
    myDB = db.DBConnection()
    myDB.action('DELETE FROM history WHERE date < ' + str(cutoff))
    # compact the database after the bulk delete
    myDB.action('VACUUM')
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, msg='Removed history entries greater than 30 days old')
|
def run(self):
    """View sickbeard's log.

    Reads the log file and walks it backwards, returning entries whose
    level is at least self.min_level, processing at most the last
    min(50, total) lines. Non-timestamped continuation lines (e.g.
    tracebacks) following an included entry are kept as well.
    """
    minLevel = logger.reverseNames[str(self.min_level).upper()]
    data = []
    if os.path.isfile(logger.sb_log_instance.log_file_path):
        with ek.ek(open, logger.sb_log_instance.log_file_path) as f:
            data = f.readlines()
    # matches "YYYY-MM-DD HH:MM:SS LEVEL source :: message"
    regex = '^(\\d\\d\\d\\d)\\-(\\d\\d)\\-(\\d\\d)\\s*(\\d\\d)\\:(\\d\\d):(\\d\\d)\\s*([A-Z]+)\\s*(.+?)\\s*\\:\\:\\s*(.*)$'
    finalData = []
    numLines = 0
    lastLine = False  # True while the most recent timestamped entry was included
    numToShow = min(50, len(data))
    for x in reversed(data):
        x = x.decode('utf-8')
        match = re.match(regex, x)
        if match:
            level = match.group(7)
            if (level not in logger.reverseNames):
                lastLine = False
                continue
            if (logger.reverseNames[level] >= minLevel):
                lastLine = True
                finalData.append(x.rstrip('\n'))
            else:
                lastLine = False
                continue
        elif lastLine:
            # continuation (no timestamp) belonging to an included entry
            # NOTE(review): the 'AA' prefix looks like a leftover marker used
            # by the consumer to tag continuation lines — confirm downstream.
            finalData.append(('AA' + x))
        numLines += 1
        if (numLines >= numToShow):
            break
    return _responds(RESULT_SUCCESS, finalData)
|
def run(self):
    """Start a manual postprocess run on self.path (or TV_DOWNLOAD_DIR)."""
    if not self.path and not sickbeard.TV_DOWNLOAD_DIR:
        return _responds(RESULT_FAILURE, msg='You need to provide a path or set TV Download Dir')
    if not self.path:
        # fall back to the configured download directory
        self.path = sickbeard.TV_DOWNLOAD_DIR
    pp_options = {'force_replace': True} if bool(self.force_replace) else {}
    data = processTV.processDir(self.path, method='Manual', pp_options=pp_options)
    if not self.return_data:
        # caller did not ask for the processing log; strip it
        data = ''
    return _responds(RESULT_SUCCESS, data=data, msg=('Started postprocess for %s' % self.path))
|
def run(self):
    """Display misc sickbeard related information (versions, commands)."""
    commands = sorted(_functionMaper.keys())
    payload = {
        'sb_version': sickbeard.version.SICKBEARD_VERSION,
        'api_version': Api.version,
        'api_commands': commands,
    }
    return _responds(RESULT_SUCCESS, payload)
|
def run(self):
    """Add a parent directory to sickbeard's config.

    ROOT_DIRS is stored as 'defaultIndex|dir1|dir2|...'. The new location
    is appended (or put first when default=1); if it is already present it
    is optionally just promoted to default.
    """
    self.location = urllib.unquote_plus(self.location)
    location_matched = 0
    if (not ek.ek(os.path.isdir, self.location)):
        return _responds(RESULT_FAILURE, msg='Location is invalid')
    root_dirs = []
    if (sickbeard.ROOT_DIRS == ''):
        # nothing configured yet: the new dir becomes the default
        self.default = 1
    else:
        root_dirs = sickbeard.ROOT_DIRS.split('|')
        index = int(sickbeard.ROOT_DIRS.split('|')[0])  # stored default index
        root_dirs.pop(0)
        root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
    for x in root_dirs:
        if (x == self.location):
            # already configured; optionally just make it the default
            location_matched = 1
            if (self.default == 1):
                index = root_dirs.index(self.location)
            break
    if (location_matched == 0):
        if (self.default == 1):
            index = 0
            root_dirs.insert(0, self.location)
        else:
            root_dirs.append(self.location)
    # re-encode and prepend the default index before persisting
    root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs]
    root_dirs_new.insert(0, index)
    root_dirs_new = '|'.join((unicode(x) for x in root_dirs_new))
    sickbeard.ROOT_DIRS = root_dirs_new
    return _responds(RESULT_SUCCESS, _getRootDirs(), msg='Root directories updated')
|
def run(self):
    """Query the scheduler: backlog/search state and next run times."""
    myDB = db.DBConnection()
    sqlResults = myDB.select('SELECT last_backlog FROM info')
    search_queue_action = sickbeard.searchQueueScheduler.action
    backlogPaused = search_queue_action.is_backlog_paused()
    backlogRunning = search_queue_action.is_backlog_in_progress()
    searchStatus = sickbeard.currentSearchScheduler.action.amActive
    # drop the fractional seconds from the countdown
    nextSearch = str(sickbeard.currentSearchScheduler.timeLeft()).split('.')[0]
    nextBacklog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)
    myDB.connection.close()
    data = {
        'backlog_is_paused': int(backlogPaused),
        'backlog_is_running': int(backlogRunning),
        'last_backlog': _ordinal_to_dateForm(sqlResults[0]['last_backlog']),
        'search_is_running': int(searchStatus),
        'next_search': nextSearch,
        'next_backlog': nextBacklog,
    }
    return _responds(RESULT_SUCCESS, data)
|
def run(self):
    """Delete a parent directory from sickbeard's config.

    Rebuilds the ROOT_DIRS setting ('defaultIndex|dir1|dir2|...') without
    self.location, keeping the default index pointing at the same directory
    whenever it survives the deletion.
    """
    if sickbeard.ROOT_DIRS == '':
        return _responds(RESULT_FAILURE, _getRootDirs(), msg='No root directories detected')
    root_dirs = sickbeard.ROOT_DIRS.split('|')
    index = int(root_dirs[0])  # first element is the default-dir index
    root_dirs.pop(0)
    root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
    old_root_dir = root_dirs[index]
    root_dirs_new = [curRootDir for curRootDir in root_dirs if curRootDir != self.location]
    # BUGFIX: the previous code compared strings with 'is' (identity), which
    # silently failed and left the default index at 0, and left newIndex
    # unbound when the location was not found at all. Re-locate the old
    # default by value in the trimmed list; fall back to 0 if it was deleted.
    newIndex = 0
    for curIndex, curNewRootDir in enumerate(root_dirs_new):
        if curNewRootDir == old_root_dir:
            newIndex = curIndex
            break
    root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs_new]
    if len(root_dirs_new) > 0:
        root_dirs_new.insert(0, newIndex)
    root_dirs_new = '|'.join(unicode(x) for x in root_dirs_new)
    sickbeard.ROOT_DIRS = root_dirs_new
    return _responds(RESULT_SUCCESS, _getRootDirs(), msg='Root directory deleted')
|
def run(self):
    """Force the episode search to run early."""
    forced = sickbeard.currentSearchScheduler.forceRun()
    if not forced:
        return _responds(RESULT_FAILURE, msg='Can not search for episode')
    return _responds(RESULT_SUCCESS, msg='Episode search forced')
|
def run(self):
    """Get sickbeard user defaults (status, quality, folder flattening)."""
    anyQualities, bestQualities = _mapQuality(sickbeard.QUALITY_DEFAULT)
    payload = {
        'status': statusStrings[sickbeard.STATUS_DEFAULT].lower(),
        'flatten_folders': int(sickbeard.FLATTEN_FOLDERS_DEFAULT),
        'initial': anyQualities,
        'archive': bestQualities,
        'future_show_paused': int(sickbeard.COMING_EPS_DISPLAY_PAUSED),
    }
    return _responds(RESULT_SUCCESS, payload)
|
def run(self):
    """Get the parent directories defined in sickbeard's config."""
    return _responds(RESULT_SUCCESS, _getRootDirs())
|
def run(self):
    """Pause or unpause the backlog search, depending on self.pause."""
    search_queue_action = sickbeard.searchQueueScheduler.action
    if self.pause is True:
        search_queue_action.pause_backlog()
        return _responds(RESULT_SUCCESS, msg='Backlog paused')
    search_queue_action.unpause_backlog()
    return _responds(RESULT_SUCCESS, msg='Backlog unpaused')
|
def run(self):
    """Check to see if sickbeard is running (never cached by clients)."""
    cherrypy.response.headers['Cache-Control'] = 'max-age=0,no-cache,no-store'
    if not sickbeard.started:
        return _responds(RESULT_SUCCESS, msg='Pong')
    # include the pid once the main loop has actually started
    return _responds(RESULT_SUCCESS, {'pid': sickbeard.PID}, 'Pong')
|
def run(self):
    """Restart sickbeard after a short delay so this response can be sent."""
    restart_timer = threading.Timer(2, sickbeard.invoke_restart, [False])
    restart_timer.start()
    return _responds(RESULT_SUCCESS, msg='SickBeard is restarting...')
|
def run(self):
    """Search for show at tvdb with a given string and language.

    Name search hits the public GetSeries endpoint and parses the XML;
    tvdbid lookup goes through the tvdb_api client. Either way the result
    is a list of {tvdbid, name, first_aired} dicts plus the numeric
    language id.
    """
    if (self.name and (not self.tvdbid)):
        # name search via the public GetSeries XML endpoint
        baseURL = 'http://thetvdb.com/api/GetSeries.php?'
        params = {'seriesname': str(self.name).encode('utf-8'), 'language': self.lang}
        finalURL = (baseURL + urllib.urlencode(params))
        urlData = sickbeard.helpers.getURL(finalURL)
        if (urlData is None):
            return _responds(RESULT_FAILURE, msg='Did not get result from tvdb')
        else:
            try:
                seriesXML = etree.ElementTree(etree.XML(urlData))
            except Exception as e:
                logger.log((((u'API :: Unable to parse XML for some reason: ' + ex(e)) + ' from XML: ') + urlData), logger.ERROR)
                return _responds(RESULT_FAILURE, msg='Unable to read result from tvdb')
            series = seriesXML.getiterator('Series')
            results = []
            for curSeries in series:
                results.append({'tvdbid': int(curSeries.findtext('seriesid')), 'name': curSeries.findtext('SeriesName'), 'first_aired': curSeries.findtext('FirstAired')})
            lang_id = self.valid_languages[self.lang]
            return _responds(RESULT_SUCCESS, {'results': results, 'langid': lang_id})
    elif self.tvdbid:
        # id lookup through the tvdb_api client
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
        lang_id = self.valid_languages[self.lang]
        if (self.lang and (not (self.lang == 'en'))):
            ltvdb_api_parms['language'] = self.lang
        t = tvdb_api.Tvdb(actors=False, **ltvdb_api_parms)
        try:
            myShow = t[int(self.tvdbid)]
        except (tvdb_exceptions.tvdb_shownotfound, tvdb_exceptions.tvdb_error):
            logger.log((u'API :: Unable to find show with id ' + str(self.tvdbid)), logger.WARNING)
            # an unknown id is still a "successful" search with zero results
            return _responds(RESULT_SUCCESS, {'results': [], 'langid': lang_id})
        if (not myShow.data['seriesname']):
            logger.log(((u'API :: Found show with tvdbid ' + str(self.tvdbid)) + ', however it contained no show name'), logger.DEBUG)
            return _responds(RESULT_FAILURE, msg='Show contains no name, invalid result')
        showOut = [{'tvdbid': self.tvdbid, 'name': unicode(myShow.data['seriesname']), 'first_aired': myShow.data['firstaired']}]
        return _responds(RESULT_SUCCESS, {'results': showOut, 'langid': lang_id})
    else:
        return _responds(RESULT_FAILURE, msg='Either tvdbid or name is required')
|
def run(self):
    """Set sickbeard user defaults (quality, status, folder flattening,
    display of paused shows on the coming-episodes page)."""
    # api quality names -> Quality constants
    quality_map = {'sdtv': Quality.SDTV, 'sddvd': Quality.SDDVD, 'hdtv': Quality.HDTV, 'rawhdtv': Quality.RAWHDTV, 'fullhdtv': Quality.FULLHDTV, 'hdwebdl': Quality.HDWEBDL, 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, 'unknown': Quality.UNKNOWN}
    iqualityID = []
    aqualityID = []
    if self.initial:
        for quality in self.initial:
            iqualityID.append(quality_map[quality])
    if self.archive:
        for quality in self.archive:
            aqualityID.append(quality_map[quality])
    if (iqualityID or aqualityID):
        sickbeard.QUALITY_DEFAULT = Quality.combineQualities(iqualityID, aqualityID)
    if self.status:
        # map the human status string onto its numeric code
        for status in statusStrings.statusStrings:
            if (statusStrings[status].lower() == str(self.status).lower()):
                self.status = status
                break
        if (not (self.status in statusStrings.statusStrings)):
            raise ApiError('Invalid Status')
        # only a fixed subset of statuses may be a default
        # NOTE(review): presumably WANTED/SKIPPED/ARCHIVED/IGNORED — confirm
        # the codes against the project's common status constants.
        if (int(self.status) not in (3, 5, 6, 7)):
            raise ApiError('Status Prohibited')
        sickbeard.STATUS_DEFAULT = self.status
    if (self.flatten_folders is not None):
        sickbeard.FLATTEN_FOLDERS_DEFAULT = int(self.flatten_folders)
    if (self.future_show_paused is not None):
        sickbeard.COMING_EPS_DISPLAY_PAUSED = int(self.future_show_paused)
    return _responds(RESULT_SUCCESS, msg='Saved defaults')
|
def run(self):
    """Shut sickbeard down after a short delay so this response can be sent."""
    shutdown_timer = threading.Timer(2, sickbeard.invoke_shutdown)
    shutdown_timer.start()
    return _responds(RESULT_SUCCESS, msg='SickBeard is shutting down...')
|
def run(self):
    """Display information for a given show.

    Aggregates the season list and cache flags from sibling commands and
    humanizes quality, location, air time and next-episode airdate.
    """
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if (not showObj):
        return _responds(RESULT_FAILURE, msg='Show not found')
    showDict = {}
    # reuse sibling commands for the season list and image-cache flags
    showDict['season_list'] = CMD_ShowSeasonList((), {'tvdbid': self.tvdbid}).run()['data']
    showDict['cache'] = CMD_ShowCache((), {'tvdbid': self.tvdbid}).run()['data']
    genreList = []
    if showObj.genre:
        # genre is stored pipe-separated; drop empty segments
        genreListTmp = showObj.genre.split('|')
        for genre in genreListTmp:
            if genre:
                genreList.append(genre)
    showDict['genre'] = genreList
    showDict['quality'] = _get_quality_string(showObj.quality)
    (anyQualities, bestQualities) = _mapQuality(showObj.quality)
    showDict['quality_details'] = {'initial': anyQualities, 'archive': bestQualities}
    try:
        showDict['location'] = showObj.location
    except sickbeard.exceptions.ShowDirNotFoundException:
        # show directory missing on disk; report an empty location
        showDict['location'] = ''
    showDict['language'] = showObj.lang
    showDict['show_name'] = showObj.name
    showDict['paused'] = showObj.paused
    showDict['air_by_date'] = showObj.air_by_date
    showDict['flatten_folders'] = showObj.flatten_folders
    # normalize "8pm" -> "8 PM" style air times
    # NOTE(review): the last replace swaps a space for a space (no-op);
    # presumably it collapsed a double space originally — confirm.
    showDict['airs'] = str(showObj.airs).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
    showDict['tvrage_id'] = showObj.tvrid
    showDict['tvrage_name'] = showObj.tvrname
    showDict['network'] = showObj.network
    if (not showDict['network']):
        showDict['network'] = ''
    showDict['status'] = showObj.status
    nextAirdate = ''
    nextEps = showObj.nextEpisode()
    if (len(nextEps) != 0):
        nextAirdate = _ordinal_to_dateForm(nextEps[0].airdate.toordinal())
    showDict['next_ep_airdate'] = nextAirdate
    return _responds(RESULT_SUCCESS, showDict)
|
def run(self):
    """Add a show in sickbeard with an existing folder.

    Verifies the tvdbid is new and the location exists, resolves the show
    name via the tvdb search command, maps the requested qualities, and
    queues the add with episodes defaulting to SKIPPED.
    """
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if showObj:
        return _responds(RESULT_FAILURE, msg='An existing tvdbid already exists in the database')
    if (not ek.ek(os.path.isdir, self.location)):
        return _responds(RESULT_FAILURE, msg='Not a valid location')
    # resolve the show name through the tvdb search command
    tvdbName = None
    tvdbResult = CMD_SickBeardSearchTVDB([], {'tvdbid': self.tvdbid}).run()
    if (tvdbResult['result'] == result_type_map[RESULT_SUCCESS]):
        if (not tvdbResult['data']['results']):
            return _responds(RESULT_FAILURE, msg='Empty results returned, check tvdbid and try again')
        if ((len(tvdbResult['data']['results']) == 1) and ('name' in tvdbResult['data']['results'][0])):
            tvdbName = tvdbResult['data']['results'][0]['name']
    if (not tvdbName):
        return _responds(RESULT_FAILURE, msg='Unable to retrieve information from tvdb')
    # api quality names -> Quality constants
    quality_map = {'sdtv': Quality.SDTV, 'sddvd': Quality.SDDVD, 'hdtv': Quality.HDTV, 'rawhdtv': Quality.RAWHDTV, 'fullhdtv': Quality.FULLHDTV, 'hdwebdl': Quality.HDWEBDL, 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, 'unknown': Quality.UNKNOWN}
    newQuality = int(sickbeard.QUALITY_DEFAULT)
    iqualityID = []
    aqualityID = []
    if self.initial:
        for quality in self.initial:
            iqualityID.append(quality_map[quality])
    if self.archive:
        for quality in self.archive:
            aqualityID.append(quality_map[quality])
    if (iqualityID or aqualityID):
        newQuality = Quality.combineQualities(iqualityID, aqualityID)
    # hand off to the show queue; existing episodes start as SKIPPED
    sickbeard.showQueueScheduler.action.addShow(int(self.tvdbid), self.location, SKIPPED, newQuality, int(self.flatten_folders))
    return _responds(RESULT_SUCCESS, {'name': tvdbName}, (tvdbName + ' has been queued to be added'))
|
def run(self):
    """Add a show in sickbeard, creating its folder under a root directory.

    Resolves the target location (explicit or default root dir), maps the
    requested quality/status defaults, fetches the show name from tvdb,
    creates the show folder (unless ADD_SHOWS_WO_DIR) and queues the add.
    """
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if showObj:
        return _responds(RESULT_FAILURE, msg='An existing tvdbid already exists in database')
    if (not self.location):
        # no explicit location: fall back to the configured default root dir
        if (sickbeard.ROOT_DIRS != ''):
            root_dirs = sickbeard.ROOT_DIRS.split('|')
            root_dirs.pop(0)
            default_index = int(sickbeard.ROOT_DIRS.split('|')[0])
            self.location = root_dirs[default_index]
        else:
            return _responds(RESULT_FAILURE, msg='Root directory is not set, please provide a location')
    if (not ek.ek(os.path.isdir, self.location)):
        return _responds(RESULT_FAILURE, msg=(("'" + self.location) + "' is not a valid location"))
    # api quality names -> Quality constants
    quality_map = {'sdtv': Quality.SDTV, 'sddvd': Quality.SDDVD, 'hdtv': Quality.HDTV, 'rawhdtv': Quality.RAWHDTV, 'fullhdtv': Quality.FULLHDTV, 'hdwebdl': Quality.HDWEBDL, 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, 'unknown': Quality.UNKNOWN}
    newQuality = int(sickbeard.QUALITY_DEFAULT)
    iqualityID = []
    aqualityID = []
    if self.initial:
        for quality in self.initial:
            iqualityID.append(quality_map[quality])
    if self.archive:
        for quality in self.archive:
            aqualityID.append(quality_map[quality])
    if (iqualityID or aqualityID):
        newQuality = Quality.combineQualities(iqualityID, aqualityID)
    newStatus = sickbeard.STATUS_DEFAULT
    if self.status:
        # map the human status string onto its numeric code
        for status in statusStrings.statusStrings:
            if (statusStrings[status].lower() == str(self.status).lower()):
                self.status = status
                break
        if (not (self.status in statusStrings.statusStrings)):
            raise ApiError('Invalid Status')
        # NOTE(review): presumably WANTED/SKIPPED/ARCHIVED/IGNORED — confirm
        # the codes against the project's common status constants.
        if (int(self.status) not in (3, 5, 6, 7)):
            return _responds(RESULT_FAILURE, msg='Status prohibited')
        newStatus = self.status
    # resolve the show name through the tvdb search command
    tvdbName = None
    tvdbResult = CMD_SickBeardSearchTVDB([], {'tvdbid': self.tvdbid}).run()
    if (tvdbResult['result'] == result_type_map[RESULT_SUCCESS]):
        if (not tvdbResult['data']['results']):
            return _responds(RESULT_FAILURE, msg='Empty results returned, check tvdbid and try again')
        if ((len(tvdbResult['data']['results']) == 1) and ('name' in tvdbResult['data']['results'][0])):
            tvdbName = tvdbResult['data']['results'][0]['name']
    if (not tvdbName):
        return _responds(RESULT_FAILURE, msg='Unable to retrieve information from tvdb')
    showPath = ek.ek(os.path.join, self.location, helpers.sanitizeFileName(tvdbName))
    if sickbeard.ADD_SHOWS_WO_DIR:
        # folder creation deferred until the show is actually processed
        logger.log(((u'Skipping initial creation of ' + showPath) + ' due to config.ini setting'))
    else:
        dir_exists = helpers.makeDir(showPath)
        if (not dir_exists):
            logger.log(((u'API :: Unable to create the folder ' + showPath) + ", can't add the show"), logger.ERROR)
            return _responds(RESULT_FAILURE, {'path': showPath}, (('Unable to create the folder ' + showPath) + ", can't add the show"))
        else:
            helpers.chmodAsParent(showPath)
    sickbeard.showQueueScheduler.action.addShow(int(self.tvdbid), showPath, newStatus, newQuality, int(self.flatten_folders), self.lang)
    return _responds(RESULT_SUCCESS, {'name': tvdbName}, (tvdbName + ' has been queued to be added'))
|
def run(self):
    """Check Sick Beard's image cache for a valid poster/banner for a show.

    Returns a success payload with ``poster``/``banner`` flags (1 when the
    cached image file exists on disk, 0 otherwise), or a failure response
    when the show is not in the show list.
    """
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    cache_obj = image_cache.ImageCache()
    # 1/0 rather than True/False keeps the JSON payload numeric.
    has_poster = 1 if ek.ek(os.path.isfile, cache_obj.poster_path(show_obj.tvdbid)) else 0
    has_banner = 1 if ek.ek(os.path.isfile, cache_obj.banner_path(show_obj.tvdbid)) else 0
    return _responds(RESULT_SUCCESS, {'poster': has_poster, 'banner': has_banner})
|
def run(self):
    """Delete a show from Sick Beard.

    Refuses to delete while the show is queued to be added or updated;
    otherwise removes the show and reports success.
    """
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    queue = sickbeard.showQueueScheduler.action
    if queue.isBeingAdded(show_obj) or queue.isBeingUpdated(show_obj):
        return _responds(RESULT_FAILURE, msg='Show can not be deleted while being added or updated')
    show_obj.deleteShow()
    return _responds(RESULT_SUCCESS, msg=(u'' + show_obj.name) + ' has been deleted')
|
def run(self):
    """Report a show's quality settings as initial/archive name lists."""
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    any_qualities, best_qualities = _mapQuality(show_obj.quality)
    return _responds(RESULT_SUCCESS, {'initial': any_qualities, 'archive': best_qualities})
|
def run(self):
    """Return the poster image for a show, served via the web interface."""
    image = webserve.WebInterface().showPoster(self.tvdbid, 'poster')
    return {'outputType': 'image', 'image': image}
|
def run(self):
    """Return the banner image for a show, served via the web interface."""
    image = webserve.WebInterface().showPoster(self.tvdbid, 'banner')
    return {'outputType': 'image', 'image': image}
|
def run(self):
    """Set a show's paused state in Sick Beard.

    ``self.pause`` must be exactly ``True`` (identity test preserved from
    the original API behavior) to pause; any other value unpauses.
    The original body ended with an unreachable failure return after an
    exhaustive if/else — removed here.
    """
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    if self.pause is True:
        show_obj.paused = 1
        return _responds(RESULT_SUCCESS, msg=(u'' + show_obj.name) + ' has been paused')
    show_obj.paused = 0
    return _responds(RESULT_SUCCESS, msg=(u'' + show_obj.name) + ' has been unpaused')
|
def run(self):
    """Queue a refresh of a show in Sick Beard."""
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    try:
        sickbeard.showQueueScheduler.action.refreshShow(show_obj)
    except exceptions.CantRefreshException as e:
        logger.log((u'API:: Unable to refresh ' + show_obj.name) + '. ' + str(ex(e)), logger.ERROR)
        return _responds(RESULT_FAILURE, msg=u'Unable to refresh ' + show_obj.name)
    return _responds(RESULT_SUCCESS, msg=(u'' + show_obj.name) + ' has queued to be refreshed')
|
def run(self):
    """Return the list of season numbers for a show, ordered per ``self.sort``."""
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    my_db = db.DBConnection(row_type='dict')
    if self.sort == 'asc':
        sql_results = my_db.select('SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season ASC', [self.tvdbid])
    else:
        sql_results = my_db.select('SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC', [self.tvdbid])
    season_list = [int(row['season']) for row in sql_results]
    my_db.connection.close()
    return _responds(RESULT_SUCCESS, season_list)
|
def run(self):
    """Return a listing of episodes for a show, optionally limited to one season.

    Without a season the result maps season -> episode -> info; with a
    season it maps episode -> info. Status, quality, and airdate fields
    are converted to display strings.

    Fixes vs. the original: ``len(sqlResults) is 0`` used an identity test
    on an int (only works by CPython small-int caching); the DB connection
    was leaked on the 'Season not found' early return; and a dead
    ``seasons[curEpisode] = {}`` assignment (immediately overwritten) is
    removed.
    """
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    my_db = db.DBConnection(row_type='dict')
    if self.season is None:
        sql_results = my_db.select('SELECT name, episode, airdate, status, season FROM tv_episodes WHERE showid = ?', [self.tvdbid])
        seasons = {}
        for row in sql_results:
            status, quality = Quality.splitCompositeStatus(int(row['status']))
            row['status'] = _get_status_Strings(status)
            row['quality'] = _get_quality_string(quality)
            row['airdate'] = _ordinal_to_dateForm(row['airdate'])
            # Season/episode become dict keys rather than row fields.
            cur_season = int(row.pop('season'))
            cur_episode = int(row.pop('episode'))
            seasons.setdefault(cur_season, {})[cur_episode] = row
    else:
        sql_results = my_db.select('SELECT name, episode, airdate, status FROM tv_episodes WHERE showid = ? AND season = ?', [self.tvdbid, self.season])
        if not sql_results:
            my_db.connection.close()
            return _responds(RESULT_FAILURE, msg='Season not found')
        seasons = {}
        for row in sql_results:
            cur_episode = int(row.pop('episode'))
            status, quality = Quality.splitCompositeStatus(int(row['status']))
            row['status'] = _get_status_Strings(status)
            row['quality'] = _get_quality_string(quality)
            row['airdate'] = _ordinal_to_dateForm(row['airdate'])
            seasons[cur_episode] = row
    my_db.connection.close()
    return _responds(RESULT_SUCCESS, seasons)
|
def run(self):
    """Set the quality for a show from delimited lists of quality names.

    Quality names are mapped to their numeric values and combined; when
    neither list is given, the global default quality is used.
    """
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    quality_map = {'sdtv': Quality.SDTV, 'sddvd': Quality.SDDVD, 'hdtv': Quality.HDTV, 'rawhdtv': Quality.RAWHDTV, 'fullhdtv': Quality.FULLHDTV, 'hdwebdl': Quality.HDWEBDL, 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, 'unknown': Quality.UNKNOWN}
    new_quality = int(sickbeard.QUALITY_DEFAULT)
    initial_ids = [quality_map[q] for q in self.initial] if self.initial else []
    archive_ids = [quality_map[q] for q in self.archive] if self.archive else []
    if initial_ids or archive_ids:
        new_quality = Quality.combineQualities(initial_ids, archive_ids)
    show_obj.quality = new_quality
    return _responds(RESULT_SUCCESS, msg=(show_obj.name + ' quality has been changed to ') + _get_quality_string(show_obj.quality))
|
def run(self):
    """Display episode statistics (downloaded/snatched/per-status counts) for a show.

    Fixes vs. the original: three ``statusCode is 'total'`` comparisons used
    identity tests on a string literal (only reliable via CPython interning);
    they are now equality tests.
    """
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')

    # Per-status counters; composite snatched/downloaded codes are tracked
    # in their own quality-keyed dicts below.
    episode_status_counts_total = {'total': 0}
    for status in statusStrings.statusStrings.keys():
        if status in [UNKNOWN, DOWNLOADED, SNATCHED, SNATCHED_PROPER]:
            continue
        episode_status_counts_total[status] = 0

    episode_qualities_counts_download = {'total': 0}
    for statusCode in Quality.DOWNLOADED:
        status, quality = Quality.splitCompositeStatus(statusCode)
        if quality in [Quality.NONE]:
            continue
        episode_qualities_counts_download[statusCode] = 0

    episode_qualities_counts_snatch = {'total': 0}
    for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER:
        status, quality = Quality.splitCompositeStatus(statusCode)
        if quality in [Quality.NONE]:
            continue
        episode_qualities_counts_snatch[statusCode] = 0

    myDB = db.DBConnection(row_type='dict')
    sqlResults = myDB.select('SELECT status, season, airdate FROM tv_episodes WHERE season > 0 AND episode > 0 AND showid = ?', [self.tvdbid])
    for row in sqlResults:
        status, quality = Quality.splitCompositeStatus(int(row['status']))
        episode_status_counts_total['total'] += 1
        if status in Quality.DOWNLOADED:
            episode_qualities_counts_download['total'] += 1
            episode_qualities_counts_download[int(row['status'])] += 1
        elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER:
            episode_qualities_counts_snatch['total'] += 1
            episode_qualities_counts_snatch[int(row['status'])] += 1
        elif status == 0 or row['airdate'] == 1:
            # Unaired / placeholder episodes don't count toward the total.
            episode_status_counts_total['total'] -= 1
        else:
            episode_status_counts_total[status] += 1

    episodes_stats = {'downloaded': {}}
    for statusCode in episode_qualities_counts_download:
        if statusCode == 'total':  # was `is 'total'`
            episodes_stats['downloaded']['total'] = episode_qualities_counts_download[statusCode]
            continue
        status, quality = Quality.splitCompositeStatus(int(statusCode))
        quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_').replace('(', '').replace(')', '')
        episodes_stats['downloaded'][quality_string] = episode_qualities_counts_download[statusCode]

    episodes_stats['snatched'] = {}
    for statusCode in episode_qualities_counts_snatch:
        if statusCode == 'total':  # was `is 'total'`
            episodes_stats['snatched']['total'] = episode_qualities_counts_snatch[statusCode]
            continue
        status, quality = Quality.splitCompositeStatus(int(statusCode))
        quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_').replace('(', '').replace(')', '')
        # Snatched and snatched-proper share quality names, so accumulate.
        if quality_string in episodes_stats['snatched']:
            episodes_stats['snatched'][quality_string] += episode_qualities_counts_snatch[statusCode]
        else:
            episodes_stats['snatched'][quality_string] = episode_qualities_counts_snatch[statusCode]

    for statusCode in episode_status_counts_total:
        if statusCode == 'total':  # was `is 'total'`
            episodes_stats['total'] = episode_status_counts_total[statusCode]
            continue
        status, quality = Quality.splitCompositeStatus(int(statusCode))
        statusString = statusStrings.statusStrings[statusCode].lower().replace(' ', '_').replace('(', '').replace(')', '')
        episodes_stats[statusString] = episode_status_counts_total[statusCode]

    myDB.connection.close()
    return _responds(RESULT_SUCCESS, episodes_stats)
|
def run(self):
    """Queue a forced update of a show in Sick Beard."""
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.tvdbid))
    if not show_obj:
        return _responds(RESULT_FAILURE, msg='Show not found')
    try:
        sickbeard.showQueueScheduler.action.updateShow(show_obj, True)
    except exceptions.CantUpdateException as e:
        logger.log((u'API:: Unable to update ' + show_obj.name) + '. ' + str(ex(e)), logger.ERROR)
        return _responds(RESULT_FAILURE, msg=u'Unable to update ' + show_obj.name)
    return _responds(RESULT_SUCCESS, msg=(u'' + show_obj.name) + ' has queued to be updated')
|
def run(self):
    """List the shows in Sick Beard, optionally filtered by paused state.

    Results are keyed by show name when ``self.sort == 'name'``, otherwise
    by tvdbid.
    """
    shows = {}
    for curShow in sickbeard.showList:
        nextAirdate = ''
        nextEps = curShow.nextEpisode()
        if nextEps:
            nextAirdate = _ordinal_to_dateForm(nextEps[0].airdate.toordinal())
        # Apply the optional paused filter.
        if self.paused is not None and bool(self.paused) != bool(curShow.paused):
            continue
        showDict = {
            'paused': curShow.paused,
            'quality': _get_quality_string(curShow.quality),
            'language': curShow.lang,
            'air_by_date': curShow.air_by_date,
            'tvdbid': curShow.tvdbid,
            'tvrage_id': curShow.tvrid,
            'tvrage_name': curShow.tvrname,
            'network': curShow.network or '',
            'show_name': curShow.name,
            'status': curShow.status,
            'next_ep_airdate': nextAirdate,
        }
        showDict['cache'] = CMD_ShowCache((), {'tvdbid': curShow.tvdbid}).run()['data']
        shows[curShow.name if self.sort == 'name' else curShow.tvdbid] = showDict
    return _responds(RESULT_SUCCESS, shows)
|
def run(self):
    """Report global show counts and snatched/downloaded/total episode stats."""
    myDB = db.DBConnection()
    today = str(datetime.date.today().toordinal())
    # Composite status-code lists rendered as SQL "IN (...)" groups.
    status_snatched = '(' + ','.join([str(quality) for quality in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ')'
    status_download = '(' + ','.join([str(quality) for quality in Quality.DOWNLOADED + [ARCHIVED]]) + ')'
    sql_statement = (
        'SELECT '
        + '(SELECT COUNT(*) FROM tv_episodes WHERE season > 0 AND episode > 0 AND status IN ' + status_snatched + ') AS ep_snatched, '
        + '(SELECT COUNT(*) FROM tv_episodes WHERE season > 0 AND episode > 0 AND status IN ' + status_download + ') AS ep_downloaded, '
        + '(SELECT COUNT(*) FROM tv_episodes WHERE season > 0 AND episode > 0 '
        + ' AND ((airdate > 1 AND airdate <= ' + today + ' AND (status = ' + str(SKIPPED) + ' OR status = ' + str(WANTED) + ')) '
        + ' OR (status IN ' + status_snatched + ') OR (status IN ' + status_download + '))) AS ep_total '
        + ' FROM tv_episodes tv_eps LIMIT 1'
    )
    sql_result = myDB.select(sql_statement)
    stats = {
        'shows_total': len(sickbeard.showList),
        'shows_active': len([show for show in sickbeard.showList if show.paused == 0 and show.status != 'Ended']),
    }
    if sql_result:
        stats['ep_snatched'] = sql_result[0]['ep_snatched']
        stats['ep_downloaded'] = sql_result[0]['ep_downloaded']
        stats['ep_total'] = sql_result[0]['ep_total']
    else:
        stats['ep_snatched'] = 0
        stats['ep_downloaded'] = 0
        stats['ep_total'] = 0
    myDB.connection.close()
    return _responds(RESULT_SUCCESS, stats)
|
def __init__(self, file_path, nzb_name=None, pp_options=None):
    """Creates a new post processor with the given file path and optionally an NZB name.

    file_path: The path to the file to be processed
    nzb_name: The name of the NZB which resulted in this file being downloaded (optional)
    pp_options: Optional dict of processing flags (currently only 'force_replace')
    """
    # The original signature used the mutable default ``pp_options={}``,
    # which is shared across all calls; use a None sentinel instead.
    if pp_options is None:
        pp_options = {}
    # Absolute directory of the file, plus bare file and folder names.
    self.folder_path = ek.ek(os.path.dirname, ek.ek(os.path.abspath, file_path))
    self.file_path = file_path
    self.file_name = ek.ek(os.path.basename, file_path)
    self.folder_name = ek.ek(os.path.basename, self.folder_path)
    self.nzb_name = nzb_name
    self.force_replace = pp_options.get('force_replace', False)
    # State populated during processing.
    self.in_history = False
    self.release_group = None
    self.release_name = None
    self.is_proper = False
    # Running textual log of everything this processor did (see _log).
    self.log = ''
|
def _log(self, message, level=logger.MESSAGE):
    """Log ``message`` and append it to this processor's running log string.

    message: The string to log (unicode)
    level: The log level to use (optional)
    """
    logger.log(message, level)
    self.log = self.log + message + '\n'
|
def _checkForExistingFile(self, existing_file):
    """Compare ``existing_file`` against the file being post-processed.

    Returns one of the PostProcessor comparison constants:
    DOESNT_EXIST when the file is missing, EXISTS_LARGER / EXISTS_SAME /
    EXISTS_SMALLER according to its size relative to self.file_path.
    """
    if not existing_file:
        self._log(u'There is no existing file', logger.DEBUG)
        return PostProcessor.DOESNT_EXIST
    if not ek.ek(os.path.isfile, existing_file):
        self._log((u'File ' + existing_file) + " doesn't exist", logger.DEBUG)
        return PostProcessor.DOESNT_EXIST
    # Stat each file once instead of once per comparison branch.
    existing_size = ek.ek(os.path.getsize, existing_file)
    new_size = ek.ek(os.path.getsize, self.file_path)
    if existing_size > new_size:
        self._log((u'File ' + existing_file) + ' is larger than ' + self.file_path, logger.DEBUG)
        return PostProcessor.EXISTS_LARGER
    if existing_size == new_size:
        self._log((u'File ' + existing_file) + ' is the same size as ' + self.file_path, logger.DEBUG)
        return PostProcessor.EXISTS_SAME
    self._log((u'File ' + existing_file) + ' is smaller than ' + self.file_path, logger.DEBUG)
    return PostProcessor.EXISTS_SMALLER
|
def _delete(self, file_path, associated_files=False):
    """Delete ``file_path`` and, optionally, all files sharing its base name.

    file_path: The file to delete (no-op when falsy)
    associated_files: True to also delete files differing only by extension
    """
    if not file_path:
        return
    file_list = [file_path]
    if associated_files:
        file_list += helpers.list_associated_files(file_path, base_name_only=True)
    if not file_list:
        self._log((u'There were no files associated with ' + file_path) + ', not deleting anything', logger.DEBUG)
        return
    for cur_file in file_list:
        if not ek.ek(os.path.isfile, cur_file):
            continue
        self._log(u'Deleting file ' + cur_file, logger.DEBUG)
        ek.ek(os.remove, cur_file)
        # Keep the Synology media index in sync with the deletion.
        notifiers.synoindex_notifier.deleteFile(cur_file)
|
def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None):
    """Apply ``action(old_path, new_path)`` (move or copy) to a media file
    and, optionally, to its associated files.

    file_path: The full path of the media file to act on
    new_path: Destination directory for the file(s)
    new_base_name: Base filename (no extension) to use; None keeps the name
    associated_files: Whether similarly-named files are processed too
    action: Callable taking (old path, new path) that performs the operation
    """
    if not action:
        self._log(u'Must provide an action for the combined file operation', logger.ERROR)
        return
    file_list = [file_path]
    if associated_files:
        file_list += helpers.list_associated_files(file_path, filter_ext=sickbeard.FILTER_ASSOCIATED_FILES)
    if not file_list:
        self._log((u'There were no files associated with ' + file_path) + ', not moving anything', logger.DEBUG)
        return
    # Everything after the media file's base name counts as the extension,
    # so compound extensions (e.g. "en.srt") survive the rename.
    old_base_name = file_path.rpartition('.')[0]
    old_base_name_length = len(old_base_name)
    for cur_file_path in file_list:
        cur_file_name = ek.ek(os.path.basename, cur_file_path)
        cur_extension = cur_file_path[old_base_name_length + 1:]
        # Preserve any pre-existing NFO instead of clobbering it later.
        if cur_extension == 'nfo':
            cur_extension = 'nfo-orig'
        if new_base_name:
            new_file_name = new_base_name + '.' + cur_extension
        else:
            new_file_name = helpers.replaceExtension(cur_file_name, cur_extension)
        action(cur_file_path, ek.ek(os.path.join, new_path, new_file_name))
|
def _move(self, file_path, new_path, new_base_name, associated_files=False):
    """Move a media file (and optionally its associated files) to ``new_path``.

    file_path: The full path of the media file to move
    new_path: Destination directory
    new_base_name: Base filename (no extension) to use; None keeps the name
    associated_files: Whether similarly-named files are moved too
    """
    def _do_move(old_path, dest_path):
        # Per-file worker handed to _combined_file_operation.
        self._log((u'Moving file from ' + old_path) + ' to ' + dest_path, logger.DEBUG)
        try:
            helpers.moveFile(old_path, dest_path)
            helpers.chmodAsParent(dest_path)
        except (IOError, OSError) as e:
            self._log((((u'Unable to move file ' + old_path) + ' to ') + dest_path) + ': ' + ex(e), logger.ERROR)
            raise e

    self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_do_move)
|
def _copy(self, file_path, new_path, new_base_name, associated_files=False):
    """Copy a media file (and optionally its associated files) to ``new_path``.

    file_path: The full path of the media file to copy
    new_path: Destination directory
    new_base_name: Base filename (no extension) to use; None keeps the name
    associated_files: Whether similarly-named files are copied too
    """
    def _int_copy(cur_file_path, new_file_path):
        self._log((u'Copying file from ' + cur_file_path) + ' to ' + new_file_path, logger.DEBUG)
        try:
            helpers.copyFile(cur_file_path, new_file_path)
            helpers.chmodAsParent(new_file_path)
        except (IOError, OSError) as e:
            # Use self._log (not bare logger.log) so the failure also lands in
            # the post-processing log string, matching _move's behavior.
            self._log((((u'Unable to copy file ' + cur_file_path) + ' to ') + new_file_path) + ': ' + ex(e), logger.ERROR)
            raise e

    self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy)
|
def _history_lookup(self):
    """Look this download up in the snatch history.

    Returns a (tvdb_id, season, [], quality) tuple; tvdb_id, season and
    quality may be None. Also sets ``self.in_history``.
    """
    to_return = (None, None, [], None)
    if not self.nzb_name and not self.folder_name:
        self.in_history = False
        return to_return
    # Candidate resource names: full NZB name, NZB name sans extension,
    # then the folder name.
    names = []
    if self.nzb_name:
        names.append(self.nzb_name)
        if '.' in self.nzb_name:
            names.append(self.nzb_name.rpartition('.')[0])
    if self.folder_name:
        names.append(self.folder_name)
    myDB = db.DBConnection()
    for cur_name in names:
        # History stores resources with dots/dashes/spaces as underscores.
        search_name = re.sub('[\\.\\-\\ ]', '_', cur_name)
        sql_results = myDB.select('SELECT * FROM history WHERE resource LIKE ?', [search_name])
        if not sql_results:
            continue
        tvdb_id = int(sql_results[0]['showid'])
        season = int(sql_results[0]['season'])
        quality = int(sql_results[0]['quality'])
        if quality == common.Quality.UNKNOWN:
            quality = None
        self.in_history = True
        to_return = (tvdb_id, season, [], quality)
        self._log(u'Found result in history: ' + str(to_return), logger.DEBUG)
        return to_return
    self.in_history = False
    return to_return
|
'Takes a name and tries to figure out a show, season, and episode from it.
name: A string which we want to analyze to determine show info from (unicode)
Returns a (tvdb_id, season, [episodes], quality) tuple. tvdb_id, season, quality may be None and episodes may be [].
if none were found.'
| def _analyze_name(self, name, file_name=True):
# NOTE(review): the file_name parameter is unused in this body — confirm
# whether any caller relies on it before removing.
| logger.log((u'Analyzing name ' + repr(name)))
to_return = (None, None, [], None)
if (not name):
return to_return
# Strip release-group suffixes and the file extension before parsing.
name = helpers.remove_non_release_groups(helpers.remove_extension(name))
np = NameParser(False)
parse_result = np.parse(name)
self._log((((u'Parsed ' + name) + ' into ') + str(parse_result).decode('utf-8', 'xmlcharrefreplace')), logger.DEBUG)
# Air-by-date shows carry a date instead of episode numbers; season -1 is
# the sentinel later converted to a real season/episode via the DB.
if parse_result.air_by_date:
season = (-1)
episodes = [parse_result.air_date]
else:
season = parse_result.season_number
episodes = parse_result.episode_numbers
to_return = (None, season, episodes, None)
# Candidate "normal" show names derived from the scene series name.
name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)
if (not name_list):
return (None, season, episodes, None)
# Pass 1: try each candidate name against the local show DB.
for cur_name in name_list:
self._log(((u'Looking up ' + cur_name) + u' in the DB'), logger.DEBUG)
db_result = helpers.searchDBForShow(cur_name)
if db_result:
self._log((u'Lookup successful, using tvdb id ' + str(db_result[0])), logger.DEBUG)
self._finalize(parse_result)
return (int(db_result[0]), season, episodes, None)
# Pass 2: try the scene-exception name table.
for cur_name in name_list:
self._log((u'Checking scene exceptions for a match on ' + cur_name), logger.DEBUG)
scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
if scene_id:
self._log(((u'Scene exception lookup got tvdb id ' + str(scene_id)) + u', using that'), logger.DEBUG)
self._finalize(parse_result)
return (scene_id, season, episodes, None)
# Pass 3: ask TVDB directly, retrying with an all-languages search on
# a TVDB-level failure.
for cur_name in name_list:
try:
t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **sickbeard.TVDB_API_PARMS)
self._log(((u'Looking up name ' + cur_name) + u' on TVDB'), logger.DEBUG)
showObj = t[cur_name]
except tvdb_exceptions.tvdb_exception:
try:
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
ltvdb_api_parms['search_all_languages'] = True
t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **ltvdb_api_parms)
self._log(((u'Looking up name ' + cur_name) + u' in all languages on TVDB'), logger.DEBUG)
showObj = t[cur_name]
except (tvdb_exceptions.tvdb_exception, IOError):
pass
# NOTE(review): this 'continue' runs even when the all-languages
# fallback above succeeded, so its showObj is discarded — looks
# unintended; confirm before changing.
continue
except IOError:
continue
self._log((u'Lookup successful, using tvdb id ' + str(showObj['id'])), logger.DEBUG)
self._finalize(parse_result)
return (int(showObj['id']), season, episodes, None)
# Nothing matched: return whatever season/episode info was parsed.
self._finalize(parse_result)
return to_return
|
'For a given file try to find the showid, season, and episode.
Runs each lookup strategy in turn (history first, then name analysis of the
NZB/file/folder names) and merges their partial results, preferring values
that came from the snatch history.
Returns a (tvdb_id, season, episodes, quality) tuple; any element may be
None/empty when it could not be determined.'
| def _find_info(self):
| tvdb_id = season = quality = None
episodes = []
# Lookup strategies, most reliable first; each returns the same 4-tuple.
attempt_list = [self._history_lookup, (lambda : self._analyze_name(self.nzb_name)), (lambda : self._analyze_name(self.file_name)), (lambda : self._analyze_name(self.folder_name)), (lambda : self._analyze_name(self.file_path)), (lambda : self._analyze_name(((self.folder_name + u' ') + self.file_name)))]
for cur_attempt in attempt_list:
try:
(cur_tvdb_id, cur_season, cur_episodes, cur_quality) = cur_attempt()
except InvalidNameException as e:
logger.log((u'Unable to parse, skipping: ' + ex(e)), logger.DEBUG)
continue
# Later attempts may not overwrite a tvdb_id/quality that came from
# the history lookup (self.in_history guards that).
if (cur_tvdb_id and (not (self.in_history and tvdb_id))):
tvdb_id = cur_tvdb_id
if (cur_quality and (not (self.in_history and quality))):
quality = cur_quality
if (cur_season is not None):
season = cur_season
if cur_episodes:
episodes = cur_episodes
# season == -1 marks an air-by-date result: episodes[0] is a date that
# must be resolved to a real season/episode via the DB.
if ((season == (-1)) and tvdb_id and episodes):
self._log(u'Looks like this is an air-by-date show, attempting to convert the date to season/episode', logger.DEBUG)
airdate = episodes[0].toordinal()
myDB = db.DBConnection()
sql_result = myDB.select('SELECT season, episode FROM tv_episodes WHERE showid = ? and airdate = ?', [tvdb_id, airdate])
if sql_result:
season = int(sql_result[0][0])
episodes = [int(sql_result[0][1])]
else:
self._log(((((u'Unable to find episode with date ' + str(episodes[0])) + u' for show ') + str(tvdb_id)) + u', skipping'), logger.DEBUG)
episodes = []
continue
# No season but a known show: assume season 1 when the show only has
# one non-special season in the DB.
elif ((season is None) and tvdb_id):
myDB = db.DBConnection()
numseasonsSQlResult = myDB.select('SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0', [tvdb_id])
if ((int(numseasonsSQlResult[0][0]) == 1) and (season is None)):
self._log(u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...", logger.DEBUG)
season = 1
# Stop early once all three pieces of information are known.
if (tvdb_id and (season is not None) and episodes):
return (tvdb_id, season, episodes, quality)
# Fall through with whatever partial information was gathered.
return (tvdb_id, season, episodes, quality)
|
def _get_ep_obj(self, tvdb_id, season, episodes):
    """Retrieve the root TVEpisode object for the given show/season/episodes.

    tvdb_id: The TVDBID of the show (int)
    season: The season of the episode (int)
    episodes: A list of episode numbers to find (list of ints)

    The first episode becomes the root; the others are attached to its
    relatedEps list. Raises PostProcessingFailed when the show is not in
    the show list or an episode cannot be created.
    """
    self._log(u'Loading show object for tvdb_id ' + str(tvdb_id), logger.DEBUG)
    try:
        show_obj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
    except exceptions.MultipleShowObjectsException:
        # Ambiguous match: let the caller decide what to do.
        raise
    if not show_obj:
        error_msg = u"This show isn't in your list, you need to add it to SB before post-processing an episode"
        self._log(error_msg, logger.ERROR)
        raise exceptions.PostProcessingFailed(error_msg)
    root_ep = None
    for cur_episode in episodes:
        episode = int(cur_episode)
        self._log((u'Retrieving episode object for ' + str(season)) + 'x' + str(episode), logger.DEBUG)
        try:
            curEp = show_obj.getEpisode(season, episode)
        except exceptions.EpisodeNotFoundException as e:
            error_msg = u'Unable to create episode: ' + ex(e)
            self._log(error_msg, logger.DEBUG)
            raise exceptions.PostProcessingFailed(error_msg)
        if root_ep is None:
            root_ep = curEp
            root_ep.relatedEps = []
        elif curEp not in root_ep.relatedEps:
            root_ep.relatedEps.append(curEp)
    return root_ep
|
def _get_quality(self, ep_obj):
    """Determine the quality of the file being post-processed.

    Tries, in order: parsing the NZB/folder/file names, the quality
    embedded in the episode's snatched status, then an assumption based on
    the filename. Returns a value from common.Quality.
    """
    # 1) Parse each available name, most reliable source first.
    for cur_name in (self.nzb_name, self.folder_name, self.file_name):
        if not cur_name:
            continue
        ep_quality = common.Quality.nameQuality(cur_name)
        self._log((u'Looking up quality for name ' + cur_name) + u', got ' + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
        if ep_quality != common.Quality.UNKNOWN:
            logger.log(((cur_name + u' looks like it has quality ') + common.Quality.qualityStrings[ep_quality]) + ', using that', logger.DEBUG)
            return ep_quality
    # 2) Fall back to the quality recorded when the episode was snatched.
    if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
        oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
        if ep_quality != common.Quality.UNKNOWN:
            self._log(u'The old status had a quality in it, using that: ' + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
            return ep_quality
    # 3) Last resort: guess from the file name/extension.
    ep_quality = common.Quality.assumeQuality(self.file_name)
    self._log((u'Guessing quality for name ' + self.file_name) + u', got ' + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
    if ep_quality != common.Quality.UNKNOWN:
        logger.log(((self.file_name + u' looks like it has quality ') + common.Quality.qualityStrings[ep_quality]) + ', using that', logger.DEBUG)
    return ep_quality
|
def _run_extra_scripts(self, ep_obj):
    """Run each configured extra script with episode details as arguments.

    ep_obj: The episode object whose details are passed to each script.
    Failures are logged and swallowed so one bad script can't abort
    post-processing.
    """
    for curScriptName in sickbeard.EXTRA_SCRIPTS:
        try:
            # Split on spaces but keep single- or double-quoted chunks intact.
            script_cmd = [piece for piece in re.split('( |\\".*?\\"|\'.*?\')', curScriptName) if piece.strip()]
            script_cmd += [
                ep_obj.location.encode(sickbeard.SYS_ENCODING),
                self.file_path.encode(sickbeard.SYS_ENCODING),
                str(ep_obj.show.tvdbid),
                str(ep_obj.season),
                str(ep_obj.episode),
                str(ep_obj.airdate),
            ]
            self._log(u'Executing command ' + str(script_cmd))
            p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
            out, err = p.communicate()
            self._log(u'Script result: ' + str(out), logger.DEBUG)
        except Exception as e:
            # Deliberately broad: extra scripts are best-effort.
            self._log(u'Unable to run extra_script: ' + ex(e))
|
'Determines if the new episode can safely replace old episode.
Episodes which are expected (snatched) or larger than the existing episode are priority, others are not.
ep_obj: The TVEpisode object in question
new_ep_quality: The quality of the episode that is being processed
Returns: True if the episode can safely replace old episode, False otherwise.
The checks below form a precedence ladder; their order is the contract.'
| def _safe_replace(self, ep_obj, new_ep_quality):
# 1) Anything Sick Beard itself snatched is expected — always safe.
| if (ep_obj.status in (common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER)):
self._log(u'Sick Beard snatched this episode, marking it safe to replace', logger.DEBUG)
return True
(old_ep_status, old_ep_quality) = common.Quality.splitCompositeStatus(ep_obj.status)
# 2) If nothing usable exists yet (not downloaded/archived), replacing is safe.
if ((old_ep_status != common.DOWNLOADED) and (old_ep_status != common.ARCHIVED)):
self._log(u'Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG)
return True
# 3) Archived episodes are never replaced.
if (old_ep_status == common.ARCHIVED):
self._log(u'Existing episode status is archived, marking it unsafe to replace', logger.DEBUG)
return False
# 4) Explicit user override wins over the quality/size comparisons below.
if self.force_replace:
self._log(u'Processed episode is set to force replace existing episode, marking it safe to replace', logger.DEBUG)
return True
# 5) Better known quality replaces; "better" unknown quality does not.
if (new_ep_quality > old_ep_quality):
if (new_ep_quality != common.Quality.UNKNOWN):
self._log(u'Existing episode status is not snatched but processed episode appears to be better quality than existing episode, marking it safe to replace', logger.DEBUG)
return True
else:
self._log(u'Episode already exists in database and processed episode has unknown quality, marking it unsafe to replace', logger.DEBUG)
return False
# 6) Same quality: fall back to comparing file sizes on disk.
if (new_ep_quality == old_ep_quality):
self._log(u'Episode already exists in database and has same quality as processed episode', logger.DEBUG)
self._log((u'Checking size of existing file: ' + ep_obj.location), logger.DEBUG)
existing_file_status = self._checkForExistingFile(ep_obj.location)
if (existing_file_status == PostProcessor.EXISTS_LARGER):
self._log(u'File exists and new file is smaller, marking it unsafe to replace', logger.DEBUG)
return False
elif (existing_file_status == PostProcessor.EXISTS_SAME):
self._log(u'File exists and new file is same size, marking it unsafe to replace', logger.DEBUG)
return False
elif (existing_file_status == PostProcessor.EXISTS_SMALLER):
self._log(u'File exists and new file is larger, marking it safe to replace', logger.DEBUG)
return True
elif (existing_file_status == PostProcessor.DOESNT_EXIST):
# Missing file: only safe when the show directory exists or SB may
# create missing show dirs itself.
if ((not ek.ek(os.path.isdir, ep_obj.show._location)) and (not sickbeard.CREATE_MISSING_SHOW_DIRS)):
self._log(u"File and Show location doesn't exist, marking it unsafe to replace", logger.DEBUG)
return False
else:
self._log(u"File doesn't exist, marking it safe to replace", logger.DEBUG)
return True
else:
self._log(((u'Unknown file status for: ' + ep_obj.location) + 'This should never happen, please log this as a bug.'), logger.ERROR)
return False
# 7) Lower known quality never replaces.
if ((new_ep_quality < old_ep_quality) and (old_ep_quality != common.Quality.UNKNOWN)):
self._log(u'Episode already exists in database and processed episode has lower quality, marking it unsafe to replace', logger.DEBUG)
return False
# 8) Default: refuse to replace.
self._log(u'None of the conditions were met, marking it unsafe to replace', logger.DEBUG)
return False
|
'Post-process a given file'
# Entry point for post-processing one downloaded file: identify the episode,
# check whether it may replace what we already have, clear out old files,
# move/copy the new file into place, update the DB and fire notifications.
# Returns True on success, False when aborted; raises PostProcessingFailed
# on hard errors (deletes, dir creation, moves).
| def process(self):
| self._log(((((u'Processing ' + self.file_path) + ' (') + str(self.nzb_name)) + ')'))
# Directories are handled by the folder processor; only single files here.
if ek.ek(os.path.isdir, self.file_path):
self._log(((u'File ' + self.file_path) + ' seems to be a directory'))
return False
self.in_history = False
# Work out show/season/episode(s)/quality from the name, history, etc.
(tvdb_id, season, episodes, quality) = self._find_info()
if ((not tvdb_id) or (season is None) or (not episodes)):
self._log(u'Not enough information to determine what episode this is', logger.DEBUG)
self._log(u'Quitting post-processing', logger.DEBUG)
return False
ep_obj = self._get_ep_obj(tvdb_id, season, episodes)
# Prefer the quality recorded at snatch time over re-detecting it.
if quality:
self._log((u'Snatch history had a quality in it, using that: ' + common.Quality.qualityStrings[quality]), logger.DEBUG)
new_ep_quality = quality
else:
new_ep_quality = self._get_quality(ep_obj)
logger.log((u'Quality of the processing episode: ' + str(new_ep_quality)), logger.DEBUG)
# Bail out unless the new file is allowed to replace the existing one.
safe_replace = self._safe_replace(ep_obj, new_ep_quality)
if (not safe_replace):
self._log(u'Quitting post-processing', logger.DEBUG)
return False
else:
self._log(u'This download is marked as safe to replace existing file', logger.DEBUG)
# Delete the existing file(s) (and now-empty folders) for every episode
# covered by this download, e.g. multi-episode files.
for cur_ep in ([ep_obj] + ep_obj.relatedEps):
try:
self._delete(cur_ep.location, associated_files=True)
if cur_ep.location:
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
except (OSError, IOError):
raise exceptions.PostProcessingFailed(u'Unable to delete the existing files')
# Re-create the show dir if the user allows it, then refresh show metadata.
if ((not ek.ek(os.path.isdir, ep_obj.show._location)) and sickbeard.CREATE_MISSING_SHOW_DIRS):
self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
try:
ek.ek(os.mkdir, ep_obj.show._location)
notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
except (OSError, IOError):
raise exceptions.PostProcessingFailed((u'Unable to create the show directory: ' + ep_obj.show._location))
ep_obj.show.writeMetadata(True)
# Record the release name and mark every covered episode as DOWNLOADED.
for cur_ep in ([ep_obj] + ep_obj.relatedEps):
if self.release_name:
self._log((u'Found release name ' + self.release_name), logger.DEBUG)
cur_ep.release_name = self.release_name
else:
cur_ep.release_name = ''
cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
# Figure out where the file should end up according to naming settings.
try:
proper_path = ep_obj.proper_path()
proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
dest_path = ek.ek(os.path.dirname, proper_absolute_path)
except exceptions.ShowDirNotFoundException:
raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")
self._log((u'Destination folder for this episode: ' + dest_path), logger.DEBUG)
if (not helpers.make_dirs(dest_path)):
raise exceptions.PostProcessingFailed((u'Unable to create destination folder: ' + dest_path))
# Either rename to the computed proper name or keep the original filename.
if sickbeard.RENAME_EPISODES:
orig_extension = self.file_name.rpartition('.')[(-1)]
new_base_name = ek.ek(os.path.basename, proper_path)
new_file_name = ((new_base_name + '.') + orig_extension)
else:
new_base_name = None
new_file_name = self.file_name
# Copy (keeps the download) or move the file into the show folder.
try:
if sickbeard.KEEP_PROCESSED_DIR:
self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
else:
self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
except (OSError, IOError):
raise exceptions.PostProcessingFailed((u'Unable to move the files to destination folder: ' + dest_path))
# Persist the new location for every covered episode.
for cur_ep in ([ep_obj] + ep_obj.relatedEps):
with cur_ep.lock:
cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
cur_ep.saveToDB()
# Log to history, notify, write metadata and update external libraries.
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)
if (not ep_obj.show.skip_notices):
notifiers.notify_download(ep_obj.prettyName())
ep_obj.createMetaFiles()
ep_obj.saveToDB()
notifiers.update_library(ep_obj)
self._run_extra_scripts(ep_obj)
return True
|
@cherrypy.expose
def generateKey(self):
    """Return a new randomized API_KEY (32-char md5 hex digest)."""
    # hashlib is preferred; very old Pythons only have the md5 module.
    try:
        from hashlib import md5
    except ImportError:
        from md5 import md5
    # NOTE(review): time()+random() is not cryptographically strong; if this
    # key guards anything sensitive, os.urandom would be a better source.
    digest = md5(str(time.time()))
    digest.update(str(random.random()))
    logger.log(u'New SB API key generated')
    return digest.hexdigest()
|
@cherrypy.expose
def newShow(self, show_to_add=None, other_shows=None):
    """Display the new show page which collects a tvdb id, folder, and extra
    options and posts them to addNewShow."""
    t = PageTemplate(file='home_newShow.tmpl')
    t.submenu = HomeMenu()
    (show_dir, tvdb_id, show_name) = self.split_extra_show(show_to_add)
    # We can only prefill the form when both an id and a name are known.
    use_provided_info = bool(tvdb_id and show_name)
    t.use_provided_info = use_provided_info
    # Default name: explicit name > folder basename (dots to spaces) > blank.
    if not show_dir:
        t.default_show_name = ''
    elif not show_name:
        t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.', ' ')
    else:
        t.default_show_name = show_name
    # cherrypy passes a scalar when only one extra show was submitted.
    if not other_shows:
        other_shows = []
    elif type(other_shows) != list:
        other_shows = [other_shows]
    if use_provided_info:
        t.provided_tvdb_id = tvdb_id
        t.provided_tvdb_name = show_name
        t.provided_show_dir = show_dir
    t.other_shows = other_shows
    return _munge(t)
|
@cherrypy.expose
def addNewShow(self, whichSeries=None, tvdbLang='en', rootDir=None, defaultStatus=None, anyQualities=None, bestQualities=None, flatten_folders=None, fullShowPath=None, other_shows=None, skipShow=None):
    """Receive tvdb id, dir, and other options and create a show from them.

    If extra show dirs are provided then it forwards back to newShow, if not
    it goes to /home.
    """
    # cherrypy hands us a scalar when only one value was submitted.
    if not other_shows:
        other_shows = []
    elif type(other_shows) != list:
        other_shows = [other_shows]

    def finishAddShow():
        # No more shows queued up: go home.
        if not other_shows:
            redirect('/home/')
        # Otherwise peel off the next dir and continue the wizard with it.
        next_show_dir = other_shows[0]
        rest_of_show_dirs = other_shows[1:]
        return self.newShow(next_show_dir, rest_of_show_dirs)

    # User chose to skip this show: just move on.
    if skipShow:
        return finishAddShow()
    # Sanity-check required parameters.
    if ((not rootDir) and (not fullShowPath)) or (not whichSeries):
        return ('Missing params, no tvdb id or folder:' + repr(whichSeries) + ' and ' + repr(rootDir) + '/' + repr(fullShowPath))
    # whichSeries is "<tvdb_id>|<show name>".
    series_pieces = whichSeries.partition('|')
    # BUG FIX: str.partition ALWAYS returns a 3-tuple, so the old
    # `len(series_pieces) < 3` check could never fire and malformed input
    # fell through to int().  Verify the separator was actually found.
    if series_pieces[1] != '|':
        return 'Error with show selection.'
    tvdb_id = int(series_pieces[0])
    show_name = series_pieces[2]
    # Show dir: either given explicitly or built from root dir + show name.
    if fullShowPath:
        show_dir = ek.ek(os.path.normpath, fullShowPath)
    else:
        show_dir = ek.ek(os.path.join, rootDir, helpers.sanitizeFileName(show_name))
    # Refuse a derived folder that already exists (explicit paths are OK).
    if ek.ek(os.path.isdir, show_dir) and (not fullShowPath):
        ui.notifications.error('Unable to add show', (('Folder ' + show_dir) + ' exists already'))
        redirect('/home/addShows/existingShows/')
    # Create the folder now unless the config defers creation.
    if sickbeard.ADD_SHOWS_WO_DIR:
        logger.log(((u'Skipping initial creation of ' + show_dir) + ' due to config.ini setting'))
    else:
        dir_exists = helpers.makeDir(show_dir)
        if not dir_exists:
            logger.log(((u'Unable to create the folder ' + show_dir) + ", can't add the show"), logger.ERROR)
            ui.notifications.error('Unable to add show', (('Unable to create the folder ' + show_dir) + ", can't add the show"))
            redirect('/home/')
        else:
            helpers.chmodAsParent(show_dir)
    # Normalize the quality/folder options into the forms the queue expects.
    flatten_folders = config.checkbox_to_value(flatten_folders)
    if not anyQualities:
        anyQualities = []
    if not bestQualities:
        bestQualities = []
    if type(anyQualities) != list:
        anyQualities = [anyQualities]
    if type(bestQualities) != list:
        bestQualities = [bestQualities]
    newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))
    # Hand the show off to the queue and report back to the user.
    sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, int(defaultStatus), newQuality, flatten_folders, tvdbLang)
    ui.notifications.message('Show added', ('Adding the specified show into ' + show_dir))
    return finishAddShow()
|
@cherrypy.expose
def existingShows(self):
    """Prints out the page to add existing shows from a root dir."""
    tmpl = PageTemplate(file='home_addExistingShow.tmpl')
    tmpl.submenu = HomeMenu()
    return _munge(tmpl)
|
@cherrypy.expose
def addExistingShows(self, shows_to_add=None, promptForSettings=None):
    """Receives a dir list and add them. Adds the ones with given TVDB IDs
    first, then forwards along to the newShow page."""
    # Normalize the cherrypy parameter into a list of unquoted dir strings.
    if not shows_to_add:
        shows_to_add = []
    elif type(shows_to_add) != list:
        shows_to_add = [shows_to_add]
    shows_to_add = [urllib.unquote_plus(cur_entry) for cur_entry in shows_to_add]
    promptForSettings = config.checkbox_to_value(promptForSettings)
    # Split entries into "dir|id|name" triples we can add right away and
    # plain dirs that need to go through the newShow wizard.
    tvdb_id_given = []
    dirs_only = []
    for cur_dir in shows_to_add:
        if '|' not in cur_dir:
            dirs_only.append(cur_dir)
            continue
        (show_dir, tvdb_id, show_name) = self.split_extra_show(cur_dir)
        if show_dir and tvdb_id and show_name:
            tvdb_id_given.append((show_dir, int(tvdb_id), show_name))
    # If the user wants to pick settings per show, run everything through
    # the wizard instead of auto-adding.
    if promptForSettings and shows_to_add:
        return self.newShow(shows_to_add[0], shows_to_add[1:])
    # Auto-add everything that came with a TVDB id, using default settings.
    num_added = 0
    for (show_dir, tvdb_id, show_name) in tvdb_id_given:
        sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, SKIPPED, sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT)
        num_added += 1
    if num_added:
        ui.notifications.message('Shows Added', (('Automatically added ' + str(num_added)) + ' from their existing metadata files'))
    # Anything left over (no id) goes through the wizard.
    if not dirs_only:
        redirect('/home/')
    return self.newShow(dirs_only[0], dirs_only[1:])
|
@cherrypy.expose
def robots_txt(self):
    """Keep web crawlers out."""
    cherrypy.response.headers['Content-Type'] = 'text/plain'
    # Disallow everything for every user agent.
    return '\n'.join(['User-agent: *', 'Disallow: /', ''])
|
def find_install_type(self):
    """Determines how this copy of SB was installed.

    returns: type of installation. Possible values are:
        'win': any compiled windows build
        'git': running from source using git
        'source': running from source without git
    """
    # Windows binary builds stamp their version string with "build ...".
    if sickbeard.version.SICKBEARD_VERSION.startswith('build '):
        return 'win'
    # A .git folder next to the program means a git checkout.
    if os.path.isdir(ek.ek(os.path.join, sickbeard.PROG_DIR, u'.git')):
        return 'git'
    return 'source'
|
def check_for_new_version(self, force=False):
    """Checks the internet for a newer version.

    returns: bool, True for new version or False for no new version.
    force: if true the VERSION_NOTIFY setting will be ignored and a check
        will be forced
    """
    # Honor the user's "don't check" preference unless the check is forced.
    if not (sickbeard.VERSION_NOTIFY or force):
        logger.log(u'Version checking is disabled, not checking for the newest version')
        return False
    logger.log(((u'Checking if ' + self.install_type) + ' needs an update'))
    if self.updater.need_update():
        # A newer version exists: let the updater publish its message.
        self.updater.set_newest_text()
        return True
    # Up to date: clear any stale "new version" banner.
    sickbeard.NEWEST_VERSION_STRING = None
    logger.log(u'No update needed')
    if force:
        ui.notifications.message('No update needed')
    return False
|
def _find_newest_version(self, whole_link=False):
    """Checks git for the newest Windows binary build.

    Returns either the build number or the entire build URL depending on
    whole_link's value.

    whole_link: If True, returns the entire URL to the release. If False,
        it returns only the build number. default: False
    """
    regex = '.*SickBeard\\-win32\\-alpha\\-build(\\d+)(?:\\.\\d+)?\\.zip'
    version_url_data = helpers.getURL(self.version_url)
    if version_url_data is None:
        return None
    # Scan the listing line by line for the first release link.
    for cur_line in version_url_data.splitlines():
        logger.log((u'checking line ' + cur_line), logger.DEBUG)
        match = re.match(regex, cur_line)
        if not match:
            continue
        logger.log(u'found a match', logger.DEBUG)
        if whole_link:
            return cur_line.strip()
        return int(match.group(1))
    return None
|
def _find_installed_version(self):
    """Attempts to find the currently installed version of Sick Beard.

    Uses `git rev-parse HEAD` to get the commit hash.
    Returns: True for success or False for failure
    """
    (output, err, exit_status) = self._run_git(self._git_path, 'rev-parse HEAD')
    # A non-zero exit or empty output means git couldn't tell us anything.
    if exit_status != 0 or not output:
        return False
    cur_commit_hash = output.strip()
    # Sanity-check that we really got a hash and not an error message.
    if not re.match('^[a-z0-9]+$', cur_commit_hash):
        logger.log(u"Output doesn't look like a hash, not using it", logger.ERROR)
        return False
    self._cur_commit_hash = cur_commit_hash
    return True
|
'Uses git commands to check if there is a newer version that the provided
commit hash. If there is a newer version it sets _num_commits_behind.'
# Fetches the remote, resolves the upstream commit hash, then counts how far
# the local HEAD is behind/ahead of upstream via `rev-list --left-right`.
| def _check_github_for_update(self):
| self._newest_commit_hash = None
self._num_commits_behind = 0
self._num_commits_ahead = 0
# Refresh remote refs; without a successful fetch the comparison is stale.
(output, err, exit_status) = self._run_git(self._git_path, 'fetch origin')
if (not (exit_status == 0)):
logger.log(u"Unable to contact github, can't check for update", logger.ERROR)
return
# Resolve the tip of the tracked upstream branch.
(output, err, exit_status) = self._run_git(self._git_path, 'rev-parse --verify --quiet "@{upstream}"')
if ((exit_status == 0) and output):
cur_commit_hash = output.strip()
# Guard against error text being mistaken for a hash.
if (not re.match('^[a-z0-9]+$', cur_commit_hash)):
logger.log(u"Output doesn't look like a hash, not using it", logger.DEBUG)
return
else:
self._newest_commit_hash = cur_commit_hash
else:
logger.log(u"git didn't return newest commit hash", logger.DEBUG)
return
# Left-right rev-list marks upstream-only commits with '<' (we are behind)
# and local-only commits with '>' (we are ahead).
(output, err, exit_status) = self._run_git(self._git_path, 'rev-list --left-right "@{upstream}"...HEAD')
if ((exit_status == 0) and output):
try:
self._num_commits_behind = int(output.count('<'))
self._num_commits_ahead = int(output.count('>'))
except:
logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG)
return
logger.log((((((((u'cur_commit = ' + str(self._cur_commit_hash)) + u', newest_commit = ') + str(self._newest_commit_hash)) + u', num_commits_behind = ') + str(self._num_commits_behind)) + u', num_commits_ahead = ') + str(self._num_commits_ahead)), logger.DEBUG)
|
def update(self):
    """Calls `git pull origin <branch>` in order to update Sick Beard.

    Returns True when the pull succeeded, False otherwise.
    """
    (output, err, exit_status) = self._run_git(self._git_path, ('pull origin ' + self.branch))
    # DEAD-CODE FIX: the old version had an unreachable trailing
    # `return False` after an if/else where both branches returned;
    # collapsed to a single equivalent expression.
    return exit_status == 0
|
'Uses pygithub to ask github if there is a newer version that the provided
commit hash. If there is a newer version it sets Sick Beard\'s version text.
commit_hash: hash that we\'re checking against'
# Prefers the compare API (gives behind_by directly); falls back to walking
# the commit list and counting until our current hash is found.
| def _check_github_for_update(self):
| self._num_commits_behind = 0
self._newest_commit_hash = None
gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
if self._cur_commit_hash:
# Compare branch tip (base) against our commit (head); 'behind_by'
# then tells how many commits we are missing.
branch_compared = gh.compare(base=self.branch, head=self._cur_commit_hash)
if ('base_commit' in branch_compared):
self._newest_commit_hash = branch_compared['base_commit']['sha']
if ('behind_by' in branch_compared):
self._num_commits_behind = int(branch_compared['behind_by'])
if (not self._newest_commit_hash):
# Fallback: newest commit is the first in the list; count commits
# until (and excluding) the one we are currently on.
for curCommit in gh.commits():
if (not self._newest_commit_hash):
self._newest_commit_hash = curCommit['sha']
if (not self._cur_commit_hash):
break
if (curCommit['sha'] == self._cur_commit_hash):
break
self._num_commits_behind += 1
logger.log((((((u'cur_commit = ' + str(self._cur_commit_hash)) + u', newest_commit = ') + str(self._newest_commit_hash)) + u', num_commits_behind = ') + str(self._num_commits_behind)), logger.DEBUG)
|
'Downloads the latest source tarball from github and installs it over the existing version.'
# Steps: download branch tarball -> extract into sb-update/ -> overlay the
# extracted tree onto PROG_DIR file by file -> record the new commit hash in
# version.txt.  Returns True on success, False on any failure.
| def update(self):
| base_url = ((('https://github.com/' + self.github_repo_user) + '/') + self.github_repo)
tar_download_url = ((base_url + '/tarball/') + self.branch)
version_path = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')
try:
# Start from a clean scratch directory.
sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sb-update')
if os.path.isdir(sb_update_dir):
logger.log(((u'Clearing out update folder ' + sb_update_dir) + ' before extracting'))
shutil.rmtree(sb_update_dir)
logger.log(((u'Creating update folder ' + sb_update_dir) + ' before extracting'))
os.makedirs(sb_update_dir)
logger.log((u'Downloading update from ' + repr(tar_download_url)))
tar_download_path = os.path.join(sb_update_dir, u'sb-update.tar')
urllib.urlretrieve(tar_download_url, tar_download_path)
# Validate the download before touching the install.
if (not ek.ek(os.path.isfile, tar_download_path)):
logger.log(((u'Unable to retrieve new version from ' + tar_download_url) + ", can't update"), logger.ERROR)
return False
if (not ek.ek(tarfile.is_tarfile, tar_download_path)):
logger.log(((u'Retrieved version from ' + tar_download_url) + " is corrupt, can't update"), logger.ERROR)
return False
logger.log((u'Extracting file ' + tar_download_path))
tar = tarfile.open(tar_download_path)
tar.extractall(sb_update_dir)
tar.close()
logger.log((u'Deleting file ' + tar_download_path))
os.remove(tar_download_path)
# Github tarballs contain exactly one top-level directory.
update_dir_contents = [x for x in os.listdir(sb_update_dir) if os.path.isdir(os.path.join(sb_update_dir, x))]
if (len(update_dir_contents) != 1):
logger.log((u'Invalid update data, update failed: ' + str(update_dir_contents)), logger.ERROR)
return False
content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
logger.log((((u'Moving files from ' + content_dir) + ' to ') + sickbeard.PROG_DIR))
# Overlay every extracted file onto the install, replacing in place.
for (dirname, dirnames, filenames) in os.walk(content_dir):
# Strip the content_dir prefix to get the path relative to PROG_DIR.
dirname = dirname[(len(content_dir) + 1):]
for curfile in filenames:
old_path = os.path.join(content_dir, dirname, curfile)
new_path = os.path.join(sickbeard.PROG_DIR, dirname, curfile)
if os.path.isfile(new_path):
os.remove(new_path)
os.renames(old_path, new_path)
# Persist the commit hash we just installed so future checks compare
# against it.
try:
with open(version_path, 'w') as ver_file:
ver_file.write(self._newest_commit_hash)
except EnvironmentError as e:
logger.log((u'Unable to write version file, update not complete: ' + ex(e)), logger.ERROR)
return False
except Exception as e:
logger.log((u'Error while trying to update: ' + ex(e)), logger.ERROR)
return False
return True
|
def _cache_dir(self):
    """Builds up the full path to the image cache directory."""
    cache_root = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
    return ek.ek(os.path.abspath, cache_root)
|
def poster_path(self, tvdb_id):
    """Builds up the path to a poster cache for a given tvdb id.

    returns: a full path to the cached poster file for the given tvdb id
    tvdb_id: ID of the show to use in the file name
    """
    return ek.ek(os.path.join, self._cache_dir(), str(tvdb_id) + '.poster.jpg')
|
def banner_path(self, tvdb_id):
    """Builds up the path to a banner cache for a given tvdb id.

    returns: a full path to the cached banner file for the given tvdb id
    tvdb_id: ID of the show to use in the file name
    """
    return ek.ek(os.path.join, self._cache_dir(), str(tvdb_id) + '.banner.jpg')
|
def has_poster(self, tvdb_id):
    """Returns true if a cached poster exists for the given tvdb id."""
    cached_path = self.poster_path(tvdb_id)
    logger.log(((u'Checking if file ' + str(cached_path)) + ' exists'), logger.DEBUG)
    return ek.ek(os.path.isfile, cached_path)
|
def has_banner(self, tvdb_id):
    """Returns true if a cached banner exists for the given tvdb id."""
    cached_path = self.banner_path(tvdb_id)
    logger.log(((u'Checking if file ' + str(cached_path)) + ' exists'), logger.DEBUG)
    return ek.ek(os.path.isfile, cached_path)
|
def which_type(self, path):
    """Analyzes the image provided and attempts to determine whether it is a
    poster or banner.

    returns: BANNER, POSTER if it concluded one or the other, or None if the
        image was neither (or didn't exist)
    path: full path to the image
    """
    if not ek.ek(os.path.isfile, path):
        logger.log(((u"Couldn't check the type of " + str(path)) + " cause it doesn't exist"), logger.WARNING)
        return None
    img_parser = createParser(path)
    # BUG FIX: createParser returns None for files it can't recognize; the
    # old code passed that None to extractMetadata and crashed.
    if not img_parser:
        logger.log(((u'Unable to parse image ' + str(path)) + ', not using your existing image'), logger.DEBUG)
        return None
    try:
        img_metadata = extractMetadata(img_parser)
        if not img_metadata:
            logger.log(((u'Unable to get metadata from ' + str(path)) + ', not using your existing image'), logger.DEBUG)
            return None
        img_ratio = (float(img_metadata.get('width')) / float(img_metadata.get('height')))
    finally:
        # BUG FIX: the parser stream used to be leaked on the "no metadata"
        # early return; close it on every path.
        img_parser.stream._input.close()
    # Posters are tall (~0.68 width/height); banners are wide (~5.4).
    if 0.55 < img_ratio < 0.8:
        return self.POSTER
    elif 5 < img_ratio < 6:
        return self.BANNER
    else:
        logger.log(((u'Image has size ratio of ' + str(img_ratio)) + ', unknown type'), logger.WARNING)
        return None
|
def _cache_image_from_file(self, image_path, img_type, tvdb_id):
    """Takes the image provided and copies it to the cache folder.

    returns: bool representing success
    image_path: path to the image we're caching
    img_type: BANNER or POSTER
    tvdb_id: id of the show this image belongs to
    """
    # Resolve the cache destination for this image type.
    if img_type == self.POSTER:
        dest_path = self.poster_path(tvdb_id)
    elif img_type == self.BANNER:
        dest_path = self.banner_path(tvdb_id)
    else:
        logger.log((u'Invalid cache image type: ' + str(img_type)), logger.ERROR)
        return False
    # Make sure the cache folder exists before copying into it.
    cache_dir = self._cache_dir()
    if not ek.ek(os.path.isdir, cache_dir):
        logger.log((u"Image cache dir didn't exist, creating it at " + str(cache_dir)))
        ek.ek(os.makedirs, cache_dir)
    logger.log((((u'Copying from ' + image_path) + ' to ') + dest_path))
    helpers.copyFile(image_path, dest_path)
    return True
|
def _cache_image_from_tvdb(self, show_obj, img_type):
    """Retrieves an image of the type specified from TVDB and saves it to the
    cache folder.

    returns: bool representing success
    show_obj: TVShow object that we want to cache an image for
    img_type: BANNER or POSTER
    """
    # Map the image type to its TVDB name and cache destination.
    if img_type == self.POSTER:
        img_type_name = 'poster'
        dest_path = self.poster_path(show_obj.tvdbid)
    elif img_type == self.BANNER:
        img_type_name = 'banner'
        dest_path = self.banner_path(show_obj.tvdbid)
    else:
        logger.log((u'Invalid cache image type: ' + str(img_type)), logger.ERROR)
        return False
    # Download via the generic metadata machinery and write it out.
    metadata_generator = GenericMetadata()
    img_data = metadata_generator._retrieve_show_image(img_type_name, show_obj)
    return metadata_generator._write_image(img_data, dest_path)
|
def fill_cache(self, show_obj):
    """Caches all images for the given show. Copies them from the show dir if
    possible, or downloads them from TVDB if they aren't in the show dir.

    show_obj: TVShow object to cache images for
    """
    logger.log((u'Checking if we need any cache images for show ' + str(show_obj.tvdbid)), logger.DEBUG)
    # Which image types are still missing from the cache.
    need_images = {self.POSTER: (not self.has_poster(show_obj.tvdbid)),
                   self.BANNER: (not self.has_banner(show_obj.tvdbid))}
    if not (need_images[self.POSTER] or need_images[self.BANNER]):
        logger.log(u'No new cache images needed, not retrieving new ones')
        return
    # First try to harvest images that already exist in the show folder.
    try:
        for cur_provider in sickbeard.metadata_provider_dict.values():
            logger.log(((u'Checking if we can use the show image from the ' + cur_provider.name) + ' metadata'), logger.DEBUG)
            # PERF FIX: compute the provider's poster path once instead of twice.
            cur_file_name = cur_provider.get_poster_path(show_obj)
            if not ek.ek(os.path.isfile, cur_file_name):
                continue
            cur_file_name = os.path.abspath(cur_file_name)
            cur_file_type = self.which_type(cur_file_name)
            # IDIOM FIX: `is None` instead of `== None`.
            if cur_file_type is None:
                logger.log((u'Unable to retrieve image type, not using the image from ' + str(cur_file_name)), logger.WARNING)
                continue
            logger.log((((((u'Checking if image ' + cur_file_name) + ' (type ') + str(cur_file_type)) + ' needs metadata: ') + str(need_images[cur_file_type])), logger.DEBUG)
            if (cur_file_type in need_images) and need_images[cur_file_type]:
                logger.log((((u"Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name) + ', type ') + str(cur_file_type)), logger.DEBUG)
                self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.tvdbid)
                need_images[cur_file_type] = False
    except exceptions.ShowDirNotFoundException:
        logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
    # Fall back to TVDB for anything still missing.
    for cur_image_type in [self.POSTER, self.BANNER]:
        logger.log((((u'Seeing if we still need an image of type ' + str(cur_image_type)) + ': ') + str(need_images[cur_image_type])), logger.DEBUG)
        if (cur_image_type in need_images) and need_images[cur_image_type]:
            self._cache_image_from_tvdb(show_obj, cur_image_type)
    logger.log(u'Done cache check')
|
def get_episode_thumb_path(self, ep_obj):
    """Returns the path where the episode thumbnail should be stored.

    Defaults to the same path as the episode file but with a .tbn extension.

    ep_obj: a TVEpisode instance for which to create the thumbnail
    """
    # No episode file on disk means no thumbnail location either.
    if not ek.ek(os.path.isfile, ep_obj.location):
        return None
    return helpers.replaceExtension(ep_obj.location, 'tbn')
|
def get_season_poster_path(self, show_obj, season):
    """Returns the full path to the file for a given season poster.

    show_obj: a TVShow instance for which to generate the path
    season: a season number to be used for the path. Note that season 0
        means specials.
    """
    # Season 0 is the specials "season"; others get a zero-padded number.
    if season == 0:
        poster_name = 'season-specials'
    else:
        poster_name = 'season' + str(season).zfill(2)
    return ek.ek(os.path.join, show_obj.location, poster_name + '.tbn')
|
'Creates an elementTree XML structure for an XBMC-style tvshow.nfo and
returns the resulting data object.
show_obj: a TVShow instance to create the NFO for'
# Fetches show info (incl. actors) from TVDB in the show's language and maps
# it field by field into a <tvshow> element tree.  Returns an ElementTree on
# success, False on incomplete TVDB data; re-raises TVDB lookup errors.
| def _show_data(self, show_obj):
| show_ID = show_obj.tvdbid
tvdb_lang = show_obj.lang
# Copy the global TVDB params so the per-show language doesn't leak out.
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if (tvdb_lang and (not (tvdb_lang == 'en'))):
ltvdb_api_parms['language'] = tvdb_lang
t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms)
tv_node = etree.Element('tvshow')
try:
myShow = t[int(show_ID)]
except tvdb_exceptions.tvdb_shownotfound:
logger.log(((u'Unable to find show with id ' + str(show_ID)) + ' on tvdb, skipping it'), logger.ERROR)
raise
except tvdb_exceptions.tvdb_error:
logger.log(u"TVDB is down, can't use its data to add this show", logger.ERROR)
raise
# A name and an id are the bare minimum for a usable NFO.
try:
if ((myShow['seriesname'] is None) or (myShow['seriesname'] == '') or (myShow['id'] is None) or (myShow['id'] == '')):
logger.log(((u'Incomplete info for show with id ' + str(show_ID)) + ' on tvdb, skipping it'), logger.ERROR)
return False
except tvdb_exceptions.tvdb_attributenotfound:
logger.log(((u'Incomplete info for show with id ' + str(show_ID)) + ' on tvdb, skipping it'), logger.ERROR)
return False
# One sub-element per NFO field; each is left empty when TVDB has no value.
title = etree.SubElement(tv_node, 'title')
if (myShow['seriesname'] is not None):
title.text = myShow['seriesname']
rating = etree.SubElement(tv_node, 'rating')
if (myShow['rating'] is not None):
rating.text = myShow['rating']
year = etree.SubElement(tv_node, 'year')
if (myShow['firstaired'] is not None):
# Derive the year from the first-aired date; ignore unparsable dates.
try:
year_text = str(datetime.datetime.strptime(myShow['firstaired'], '%Y-%m-%d').year)
if year_text:
year.text = year_text
except:
pass
plot = etree.SubElement(tv_node, 'plot')
if (myShow['overview'] is not None):
plot.text = myShow['overview']
# Episode guide URL is written in both the nested and the legacy flat form.
episodeguide = etree.SubElement(tv_node, 'episodeguide')
episodeguideurl = etree.SubElement(episodeguide, 'url')
episodeguideurl2 = etree.SubElement(tv_node, 'episodeguideurl')
if (myShow['id'] is not None):
showurl = (((sickbeard.TVDB_BASE_URL + '/series/') + myShow['id']) + '/all/en.zip')
episodeguideurl.text = showurl
episodeguideurl2.text = showurl
mpaa = etree.SubElement(tv_node, 'mpaa')
if (myShow['contentrating'] is not None):
mpaa.text = myShow['contentrating']
tvdbid = etree.SubElement(tv_node, 'id')
if (myShow['id'] is not None):
tvdbid.text = myShow['id']
genre = etree.SubElement(tv_node, 'genre')
if (myShow['genre'] is not None):
# TVDB pipes-delimited genres -> "A / B / C".
genre.text = ' / '.join([x.strip() for x in myShow['genre'].split('|') if (x and x.strip())])
premiered = etree.SubElement(tv_node, 'premiered')
if (myShow['firstaired'] is not None):
premiered.text = myShow['firstaired']
studio = etree.SubElement(tv_node, 'studio')
if (myShow['network'] is not None):
studio.text = myShow['network']
# One <actor> element (name/role/thumb) per TVDB actor entry.
if (myShow['_actors'] is not None):
for actor in myShow['_actors']:
cur_actor_name_text = actor['name']
if ((cur_actor_name_text is not None) and cur_actor_name_text.strip()):
cur_actor = etree.SubElement(tv_node, 'actor')
cur_actor_name = etree.SubElement(cur_actor, 'name')
cur_actor_name.text = cur_actor_name_text.strip()
cur_actor_role = etree.SubElement(cur_actor, 'role')
cur_actor_role_text = actor['role']
if (cur_actor_role_text is not None):
cur_actor_role.text = cur_actor_role_text
cur_actor_thumb = etree.SubElement(cur_actor, 'thumb')
cur_actor_thumb_text = actor['image']
if (cur_actor_thumb_text is not None):
cur_actor_thumb.text = cur_actor_thumb_text
# Pretty-print and wrap in an ElementTree for the caller to serialize.
helpers.indentXML(tv_node)
data = etree.ElementTree(tv_node)
return data
|
'Creates an elementTree XML structure for an XBMC-style episode.nfo and
returns the resulting data object.
show_obj: a TVEpisode instance to create the NFO for'
# Builds one <episodedetails> per episode; multi-episode files are wrapped
# in <xbmcmultiepisode>.  Returns an ElementTree, or None when TVDB lacks
# the episode or its title.
| def _ep_data(self, ep_obj):
| eps_to_write = ([ep_obj] + ep_obj.relatedEps)
tvdb_lang = ep_obj.show.lang
# Copy the global TVDB params so the per-show language doesn't leak out.
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if (tvdb_lang and (not (tvdb_lang == 'en'))):
ltvdb_api_parms['language'] = tvdb_lang
try:
t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms)
myShow = t[ep_obj.show.tvdbid]
except tvdb_exceptions.tvdb_shownotfound as e:
raise exceptions.ShowNotFoundException(e.message)
except tvdb_exceptions.tvdb_error as e:
logger.log((u'Unable to connect to TVDB while creating meta files - skipping - ' + ex(e)), logger.ERROR)
return
# Multi-episode files get a wrapper element around the per-episode nodes.
if (len(eps_to_write) > 1):
rootNode = etree.Element('xbmcmultiepisode')
else:
rootNode = etree.Element('episodedetails')
for curEpToWrite in eps_to_write:
try:
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
logger.log(((((u'Unable to find episode ' + str(curEpToWrite.season)) + 'x') + str(curEpToWrite.episode)) + ' on tvdb... has it been removed? Should I delete from db?'))
return None
# Ordinal 1 is the sentinel "unknown airdate" used throughout SB.
if (not myEp['firstaired']):
myEp['firstaired'] = str(datetime.date.fromordinal(1))
if (not myEp['episodename']):
logger.log(u'Not generating nfo because the ep has no title', logger.DEBUG)
return None
logger.log((((u'Creating metadata for episode ' + str(ep_obj.season)) + 'x') + str(ep_obj.episode)), logger.DEBUG)
if (len(eps_to_write) > 1):
episode = etree.SubElement(rootNode, 'episodedetails')
else:
episode = rootNode
# One sub-element per NFO field, left empty when the source has no value.
title = etree.SubElement(episode, 'title')
if (curEpToWrite.name is not None):
title.text = curEpToWrite.name
showtitle = etree.SubElement(episode, 'showtitle')
if (curEpToWrite.show.name is not None):
showtitle.text = curEpToWrite.show.name
season = etree.SubElement(episode, 'season')
season.text = str(curEpToWrite.season)
episodenum = etree.SubElement(episode, 'episode')
episodenum.text = str(curEpToWrite.episode)
uniqueid = etree.SubElement(episode, 'uniqueid')
uniqueid.text = str(curEpToWrite.tvdbid)
aired = etree.SubElement(episode, 'aired')
if (curEpToWrite.airdate != datetime.date.fromordinal(1)):
aired.text = str(curEpToWrite.airdate)
else:
aired.text = ''
plot = etree.SubElement(episode, 'plot')
if (curEpToWrite.description is not None):
plot.text = curEpToWrite.description
runtime = etree.SubElement(episode, 'runtime')
# Specials (season 0) don't get a runtime.
if (curEpToWrite.season != 0):
if (myShow['runtime'] is not None):
runtime.text = myShow['runtime']
# displayseason/displayepisode control ordering of specials in XBMC.
displayseason = etree.SubElement(episode, 'displayseason')
if ('airsbefore_season' in myEp):
displayseason_text = myEp['airsbefore_season']
if (displayseason_text is not None):
displayseason.text = displayseason_text
displayepisode = etree.SubElement(episode, 'displayepisode')
if ('airsbefore_episode' in myEp):
displayepisode_text = myEp['airsbefore_episode']
if (displayepisode_text is not None):
displayepisode.text = displayepisode_text
thumb = etree.SubElement(episode, 'thumb')
thumb_text = myEp['filename']
if (thumb_text is not None):
thumb.text = thumb_text
watched = etree.SubElement(episode, 'watched')
watched.text = 'false'
credits = etree.SubElement(episode, 'credits')
credits_text = myEp['writer']
if (credits_text is not None):
credits.text = credits_text
director = etree.SubElement(episode, 'director')
director_text = myEp['director']
if (director_text is not None):
director.text = director_text
rating = etree.SubElement(episode, 'rating')
rating_text = myEp['rating']
if (rating_text is not None):
rating.text = rating_text
# Guest stars come as a pipe-delimited string; one <actor> per name.
gueststar_text = myEp['gueststars']
if (gueststar_text is not None):
for actor in (x.strip() for x in gueststar_text.split('|') if (x and x.strip())):
cur_actor = etree.SubElement(episode, 'actor')
cur_actor_name = etree.SubElement(cur_actor, 'name')
cur_actor_name.text = actor
# Regular cast from the show record, with role and thumbnail.
if (myShow['_actors'] is not None):
for actor in myShow['_actors']:
cur_actor_name_text = actor['name']
if ((cur_actor_name_text is not None) and cur_actor_name_text.strip()):
cur_actor = etree.SubElement(episode, 'actor')
cur_actor_name = etree.SubElement(cur_actor, 'name')
cur_actor_name.text = cur_actor_name_text.strip()
cur_actor_role = etree.SubElement(cur_actor, 'role')
cur_actor_role_text = actor['role']
if (cur_actor_role_text is not None):
cur_actor_role.text = cur_actor_role_text
cur_actor_thumb = etree.SubElement(cur_actor, 'thumb')
cur_actor_thumb_text = actor['image']
if (cur_actor_thumb_text is not None):
cur_actor_thumb.text = cur_actor_thumb_text
# Pretty-print and wrap for the caller to serialize.
helpers.indentXML(rootNode)
data = etree.ElementTree(rootNode)
return data
|
def get_episode_thumb_path(self, ep_obj):
    """Return the path where the episode thumbnail should be stored.

    Defaults to the episode file's own path with a ``.cover.jpg``
    extension appended, or ``None`` when the episode file does not
    exist on disk.

    ep_obj: a TVEpisode instance for which to create the thumbnail
    """
    # Guard clause: without an existing media file there is no
    # sensible location for a cover image.
    if not ek.ek(os.path.isfile, ep_obj.location):
        return None
    return ep_obj.location + '.cover.jpg'
|
def get_episode_file_path(self, ep_obj):
    """Return the full <show dir>/.meta/<file>.<ext> path for a Tivo
    episode metadata file.

    pyTivo requires the metadata filename to keep the original media
    extension (ie. foo.avi -> foo.avi.txt), so the basename is reused
    whole and only the metadata extension is appended.

    ep_obj: a TVEpisode object to get the path for

    Returns '' when the episode file itself is missing on disk.
    """
    if not ek.ek(os.path.isfile, ep_obj.location):
        logger.log((u"Episode location doesn't exist: " + str(ep_obj.location)), logger.DEBUG)
        return ''
    # Metadata lives in a hidden ".meta" directory beside the media file.
    base_name = ek.ek(os.path.basename, ep_obj.location) + '.' + self._ep_nfo_extension
    meta_dir = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), '.meta')
    return ek.ek(os.path.join, meta_dir, base_name)
|
def _ep_data(self, ep_obj):
    """Create the key/value content of a Tivo episode metadata file.

    ep_obj: a TVEpisode instance to create the metadata file for.

    Show and episode details are looked up on http://thetvdb.com/ via
    the tvdb_api library (https://github.com/dbr/tvdb_api/); the key
    names come from
    http://pytivo.sourceforge.net/wiki/index.php/Metadata

    Returns the metadata text, False on a TVDB connection error, or
    None when an episode is missing from TVDB or lacks required fields.
    """
    data = ''
    # A multi-episode file gets a single metadata blob covering the
    # primary episode and every related episode.
    eps_to_write = ([ep_obj] + ep_obj.relatedEps)
    tvdb_lang = ep_obj.show.lang
    try:
        # Work on a copy so the show-specific language override does
        # not leak into the global TVDB parameters.
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
        if (tvdb_lang and (not (tvdb_lang == 'en'))):
            ltvdb_api_parms['language'] = tvdb_lang
        t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms)
        myShow = t[ep_obj.show.tvdbid]
    except tvdb_exceptions.tvdb_shownotfound as e:
        raise exceptions.ShowNotFoundException(str(e))
    except tvdb_exceptions.tvdb_error as e:
        logger.log((u'Unable to connect to TVDB while creating meta files - skipping - ' + str(e)), logger.ERROR)
        return False
    for curEpToWrite in eps_to_write:
        try:
            myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
        except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
            logger.log(((((u'Unable to find episode ' + str(curEpToWrite.season)) + 'x') + str(curEpToWrite.episode)) + ' on tvdb... has it been removed? Should I delete from db?'))
            return None
        # Specials (season 0) often have no air date on TVDB; fake the
        # epoch date so the required-field check below doesn't reject them.
        if ((myEp['firstaired'] is None) and (ep_obj.season == 0)):
            myEp['firstaired'] = str(datetime.date.fromordinal(1))
        if ((myEp['episodename'] is None) or (myEp['firstaired'] is None)):
            return None
        if (myShow['seriesname'] is not None):
            data += (('title : ' + myShow['seriesname']) + '\n')
            data += (('seriesTitle : ' + myShow['seriesname']) + '\n')
        data += (('episodeTitle : ' + curEpToWrite._format_pattern('%Sx%0E %EN')) + '\n')
        # NOTE(review): only the episode number is written here, not the
        # season - presumably the season is carried by episodeTitle; confirm
        # against the pyTivo metadata spec.
        data += (('episodeNumber : ' + str(curEpToWrite.episode)) + '\n')
        data += 'isEpisode : true\n'
        # Replace typographic quotes/apostrophes with ASCII equivalents.
        # NOTE(review): description may be None, which would raise
        # AttributeError on .replace() - confirm callers guarantee a string.
        sanitizedDescription = curEpToWrite.description
        sanitizedDescription = sanitizedDescription.replace(u'\u201c', '"').replace(u'\u201d', '"')
        sanitizedDescription = sanitizedDescription.replace(u'\u2018', "'").replace(u'\u2019', "'").replace(u'\u02bc', "'")
        data += (('description : ' + sanitizedDescription) + '\n')
        if (myShow['zap2it_id'] is not None):
            data += (('seriesId : ' + myShow['zap2it_id']) + '\n')
        if (myShow['network'] is not None):
            data += (('callsign : ' + myShow['network']) + '\n')
        if (curEpToWrite.airdate != datetime.date.fromordinal(1)):
            data += (('originalAirDate : ' + str(curEpToWrite.airdate)) + 'T00:00:00Z\n')
        if myShow['actors']:
            # TVDB delivers actors as a pipe-delimited string.
            for actor in myShow['actors'].split('|'):
                if ((actor is not None) and actor.strip()):
                    data += (('vActor : ' + actor.strip()) + '\n')
        if (myEp['rating'] is not None):
            try:
                rating = float(myEp['rating'])
            except ValueError:
                rating = 0.0
            # Rescale the rating - presumably TVDB's 0-10 scale mapped to
            # Tivo's half-star starRating scale (max 4.0); verify against
            # the pyTivo metadata documentation.
            rating = (round(((8 * rating) / 10)) / 2.0)
            data += (('starRating : ' + str(rating)) + '\n')
        if myShow['contentrating']:
            data += (('tvRating : ' + str(myShow['contentrating'])) + '\n')
        if ep_obj.show.genre:
            for genre in ep_obj.show.genre.split('|'):
                if (genre and genre.strip()):
                    data += (('vProgramGenre : ' + str(genre.strip())) + '\n')
    return data
|
def write_ep_file(self, ep_obj):
    """Generate ep_obj's metadata and write it to disk.

    The target path (episode name plus _ep_nfo_extension) comes from
    get_episode_file_path(); the containing directory is created on
    demand.  Returns True on success, False when no metadata could be
    generated or the file could not be written.

    ep_obj: TVEpisode object for which to create the metadata
    """
    metadata = self._ep_data(ep_obj)
    if not metadata:
        return False
    out_path = self.get_episode_file_path(ep_obj)
    out_dir = ek.ek(os.path.dirname, out_path)
    try:
        if not ek.ek(os.path.isdir, out_dir):
            logger.log((u"Metadata dir didn't exist, creating it at " + out_dir), logger.DEBUG)
            ek.ek(os.makedirs, out_dir)
            # Match ownership/permissions of the parent directory.
            helpers.chmodAsParent(out_dir)
        logger.log((u'Writing episode nfo file to ' + out_path), logger.DEBUG)
        with ek.ek(open, out_path, 'w') as out_file:
            out_file.write(metadata.encode('utf-8'))
        helpers.chmodAsParent(out_path)
    except EnvironmentError as e:
        logger.log((((u'Unable to write file to ' + out_path) + ' - are you sure the folder is writable? ') + ex(e)), logger.ERROR)
        return False
    return True
|
def get_episode_thumb_path(self, ep_obj):
    """Return the path where the episode thumbnail should be stored.

    The media extension is replaced with '-thumb.jpg'; when the
    filename contains no dot the suffix is appended to the whole
    location.  Returns None if the episode file is missing on disk.

    ep_obj: a TVEpisode instance for which to create the thumbnail
    """
    if not ek.ek(os.path.isfile, ep_obj.location):
        return None
    stem, _dot, _ext = ep_obj.location.rpartition('.')
    # rpartition yields an empty stem when there is no '.' at all.
    base = ep_obj.location if stem == '' else stem
    return base + '-thumb.jpg'
|
def get_season_poster_path(self, show_obj, season):
    """Return the full path of the poster image for the given season.

    show_obj: a TVShow instance for which to generate the path
    season: season number; 0 means specials and maps to
            'season-specials-poster.jpg', others to 'seasonNN-poster.jpg'
    """
    prefix = 'season-specials' if season == 0 else 'season' + str(season).zfill(2)
    return ek.ek(os.path.join, show_obj.location, prefix + '-poster.jpg')
|
def get_season_banner_path(self, show_obj, season):
    """Return the full path of the banner image for the given season.

    show_obj: a TVShow instance for which to generate the path
    season: season number; 0 means specials and maps to
            'season-specials-banner.jpg', others to 'seasonNN-banner.jpg'
    """
    prefix = 'season-specials' if season == 0 else 'season' + str(season).zfill(2)
    return ek.ek(os.path.join, show_obj.location, prefix + '-banner.jpg')
|
def _show_data(self, show_obj):
    """Produce the content of the show metadata file.

    Base-class stub: implementing classes are expected to override
    this; here it always yields None.
    """
    return None
|
def _ep_data(self, ep_obj):
    """Produce the content of the episode metadata file.

    Base-class stub: implementing classes are expected to override
    this; here it always yields None.
    """
    return None
|
def _get_episode_thumb_url(self, ep_obj):
    """Return the URL for downloading an episode's thumbnail, using
    theTVDB.com data.

    Checks ep_obj and every related (multi-file) episode in turn and
    returns the first non-empty 'filename' banner URL, or None when no
    episode has a thumbnail or TVDB is unreachable.

    ep_obj: a TVEpisode object for which to grab the thumb URL

    Raises exceptions.ShowNotFoundException when the show is missing
    from TVDB.
    """
    all_eps = ([ep_obj] + ep_obj.relatedEps)
    tvdb_lang = ep_obj.show.lang
    try:
        # Copy the global parms so the per-show language override
        # doesn't leak into other lookups.
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
        if (tvdb_lang and (not (tvdb_lang == 'en'))):
            ltvdb_api_parms['language'] = tvdb_lang
        t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms)
        tvdb_show_obj = t[ep_obj.show.tvdbid]
    except tvdb_exceptions.tvdb_shownotfound as e:
        # str(e) instead of the deprecated e.message (removed in
        # Python 3) - matches the handling used by the sibling
        # _ep_data implementation.
        raise exceptions.ShowNotFoundException(str(e))
    except tvdb_exceptions.tvdb_error as e:
        logger.log((u'Unable to connect to TVDB while creating meta files - skipping - ' + ex(e)), logger.ERROR)
        return None
    for cur_ep in all_eps:
        try:
            myEp = tvdb_show_obj[cur_ep.season][cur_ep.episode]
        except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
            logger.log(((((u'Unable to find episode ' + str(cur_ep.season)) + 'x') + str(cur_ep.episode)) + ' on tvdb... has it been removed? Should I delete from db?'))
            continue
        thumb_url = myEp['filename']
        if thumb_url:
            return thumb_url
    return None
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.