'Generates and writes show_obj's metadata under the given path to the filename given by get_show_file_path(). show_obj: TVShow object for which to create the metadata. path: an absolute or relative path where we should put the file; note that the file name will be the default show_file_name. Note that this method expects _show_data to return an ElementTree object. If your _show_data returns data in another format you'll need to override this method.'
def write_show_file(self, show_obj):
    data = self._show_data(show_obj)

    if not data:
        return False

    nfo_file_path = self.get_show_file_path(show_obj)
    nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path)

    try:
        if not ek.ek(os.path.isdir, nfo_file_dir):
            logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
            ek.ek(os.makedirs, nfo_file_dir)
            helpers.chmodAsParent(nfo_file_dir)

        logger.log(u'Writing show nfo file to ' + nfo_file_path, logger.DEBUG)

        nfo_file = ek.ek(open, nfo_file_path, 'w')

        # prepend an XML declaration when writing a bare .xml file
        if nfo_file_path.endswith('.xml'):
            nfo_file.write('<?xml version="1.0" encoding="UTF-8"?>\n')

        data.write(nfo_file, encoding='utf-8')
        nfo_file.close()
        helpers.chmodAsParent(nfo_file_path)
    except IOError as e:
        logger.log(u'Unable to write file to ' + nfo_file_path + ' - are you sure the folder is writable? ' + ex(e), logger.ERROR)
        return False

    return True
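As the note about _show_data above suggests, a provider subclass only needs to build an ElementTree; write_show_file() takes care of paths, directory creation and permissions. The following is a minimal illustrative sketch of such an override for a hypothetical provider (it only uses names that appear elsewhere in this module), not the actual implementation of any provider here:

    # Illustrative only: a hypothetical provider whose _show_data builds the
    # minimal ElementTree that write_show_file() expects.
    def _show_data(self, show_obj):
        tv_node = etree.Element('tvshow')

        title = etree.SubElement(tv_node, 'title')
        title.text = show_obj.name            # TVShow exposes .name (see update_library below)

        tvdbid = etree.SubElement(tv_node, 'tvdbid')
        tvdbid.text = str(show_obj.tvdbid)

        helpers.indentXML(tv_node)
        return etree.ElementTree(tv_node)     # write_show_file() calls .write() on this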
'Generates and writes ep_obj's metadata under the given path with the given filename root. Uses the episode's name with the extension in _ep_nfo_extension. ep_obj: TVEpisode object for which to create the metadata. file_name_path: the file name to use for this metadata; note that the extension will be added automatically based on _ep_nfo_extension and that this should include an absolute path. Note that this method expects _ep_data to return an ElementTree object. If your _ep_data returns data in another format you'll need to override this method.'
def write_ep_file(self, ep_obj):
    data = self._ep_data(ep_obj)

    if not data:
        return False

    nfo_file_path = self.get_episode_file_path(ep_obj)
    nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path)

    try:
        if not ek.ek(os.path.isdir, nfo_file_dir):
            logger.log(u"Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
            ek.ek(os.makedirs, nfo_file_dir)
            helpers.chmodAsParent(nfo_file_dir)

        logger.log(u'Writing episode nfo file to ' + nfo_file_path, logger.DEBUG)

        nfo_file = ek.ek(open, nfo_file_path, 'w')

        if nfo_file_path.endswith('.xml'):
            nfo_file.write('<?xml version="1.0" encoding="UTF-8"?>\n')

        data.write(nfo_file, encoding='utf-8')
        nfo_file.close()
        helpers.chmodAsParent(nfo_file_path)
    except IOError as e:
        logger.log(u'Unable to write file to ' + nfo_file_path + ' - are you sure the folder is writable? ' + ex(e), logger.ERROR)
        return False

    return True
'Retrieves a thumbnail and saves it to the correct spot. This method should not need to be overridden by implementing classes, changing get_episode_thumb_path and _get_episode_thumb_url should suffice. ep_obj: a TVEpisode object for which to generate a thumbnail'
def save_thumbnail(self, ep_obj):
    file_path = self.get_episode_thumb_path(ep_obj)

    if not file_path:
        logger.log(u'Unable to find a file path to use for this thumbnail, not generating it', logger.DEBUG)
        return False

    thumb_url = self._get_episode_thumb_url(ep_obj)

    if not thumb_url:
        logger.log(u'No thumb is available for this episode, not creating a thumb', logger.DEBUG)
        return False

    thumb_data = metadata_helpers.getShowImage(thumb_url)

    result = self._write_image(thumb_data, file_path)

    if not result:
        return False

    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        cur_ep.hastbn = True

    return True
'Downloads a fanart image and saves it to the filename specified by fanart_name inside the show's root folder. show_obj: a TVShow object for which to download fanart'
def save_fanart(self, show_obj, which=None):
    fanart_path = self.get_fanart_path(show_obj)

    fanart_data = self._retrieve_show_image('fanart', show_obj, which)

    if not fanart_data:
        logger.log(u'No fanart image was retrieved, unable to write fanart', logger.DEBUG)
        return False

    return self._write_image(fanart_data, fanart_path)
'Downloads a poster image and saves it to the filename specified by poster_name inside the show's root folder. show_obj: a TVShow object for which to download a poster'
def save_poster(self, show_obj, which=None):
    poster_path = self.get_poster_path(show_obj)

    poster_data = self._retrieve_show_image('poster', show_obj, which)

    if not poster_data:
        logger.log(u'No show poster image was retrieved, unable to write poster', logger.DEBUG)
        return False

    return self._write_image(poster_data, poster_path)
'Downloads a banner image and saves it to the filename specified by banner_name inside the show's root folder. show_obj: a TVShow object for which to download a banner'
def save_banner(self, show_obj, which=None):
    banner_path = self.get_banner_path(show_obj)

    banner_data = self._retrieve_show_image('banner', show_obj, which)

    if not banner_data:
        logger.log(u'No show banner image was retrieved, unable to write banner', logger.DEBUG)
        return False

    return self._write_image(banner_data, banner_path)
'Saves all season posters to disk for the given show. show_obj: a TVShow object for which to save the season thumbs Cycles through all seasons and saves the season posters if possible. This method should not need to be overridden by implementing classes, changing _season_posters_dict and get_season_poster_path should be good enough.'
def save_season_posters(self, show_obj, season):
    season_dict = self._season_posters_dict(show_obj, season)
    result = []

    for cur_season in season_dict:
        cur_season_art = season_dict[cur_season]

        if len(cur_season_art) == 0:
            continue

        # grab whichever art entry is available for this season
        (art_id, season_url) = cur_season_art.popitem()

        season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)

        if not season_poster_file_path:
            logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season', logger.DEBUG)
            continue

        seasonData = metadata_helpers.getShowImage(season_url)

        if not seasonData:
            logger.log(u'No season poster data available, skipping this season', logger.DEBUG)
            continue

        result = result + [self._write_image(seasonData, season_poster_file_path)]

    if result:
        return all(result)
    else:
        return False
'Saves all season banners to disk for the given show. show_obj: a TVShow object for which to save the season thumbs Cycles through all seasons and saves the season banners if possible. This method should not need to be overridden by implementing classes, changing _season_banners_dict and get_season_banner_path should be good enough.'
def save_season_banners(self, show_obj, season):
    season_dict = self._season_banners_dict(show_obj, season)
    result = []

    for cur_season in season_dict:
        cur_season_art = season_dict[cur_season]

        if len(cur_season_art) == 0:
            continue

        # grab whichever art entry is available for this season
        (art_id, season_url) = cur_season_art.popitem()

        season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)

        if not season_banner_file_path:
            logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season', logger.DEBUG)
            continue

        seasonData = metadata_helpers.getShowImage(season_url)

        if not seasonData:
            logger.log(u'No season banner data available, skipping this season', logger.DEBUG)
            continue

        result = result + [self._write_image(seasonData, season_banner_file_path)]

    if result:
        return all(result)
    else:
        return False
'Saves the data in image_data to the location image_path. Returns True/False to represent success or failure. image_data: binary image data to write to file image_path: file location to save the image to'
def _write_image(self, image_data, image_path):
    if ek.ek(os.path.isfile, image_path):
        logger.log(u'Image already exists, not downloading', logger.DEBUG)
        return False

    if not image_data:
        logger.log(u'Unable to retrieve image, skipping', logger.WARNING)
        return False

    image_dir = ek.ek(os.path.dirname, image_path)

    try:
        if not ek.ek(os.path.isdir, image_dir):
            logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG)
            ek.ek(os.makedirs, image_dir)
            helpers.chmodAsParent(image_dir)

        outFile = ek.ek(open, image_path, 'wb')
        outFile.write(image_data)
        outFile.close()
        helpers.chmodAsParent(image_path)
    except IOError as e:
        logger.log(u'Unable to write image to ' + image_path + ' - are you sure the show folder is writable? ' + ex(e), logger.ERROR)
        return False

    return True
'Gets an image URL from theTVDB.com, downloads it and returns the data. image_type: type of image to retrieve (currently supported: fanart, poster, banner) show_obj: a TVShow object to use when searching for the image which: optional, a specific numbered poster to look for Returns: the binary image data if available, or else None'
def _retrieve_show_image(self, image_type, show_obj, which=None):
    tvdb_lang = show_obj.lang

    try:
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

        if tvdb_lang and not tvdb_lang == 'en':
            ltvdb_api_parms['language'] = tvdb_lang

        t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
        tvdb_show_obj = t[show_obj.tvdbid]
    except (tvdb_exceptions.tvdb_error, IOError) as e:
        logger.log(u'Unable to look up show on TVDB, not downloading images: ' + ex(e), logger.ERROR)
        return None

    if image_type not in ('fanart', 'poster', 'banner'):
        logger.log(u'Invalid image type ' + str(image_type) + ", couldn't find it in the TVDB object", logger.ERROR)
        return None

    image_url = tvdb_show_obj[image_type]

    image_data = metadata_helpers.getShowImage(image_url, which)

    return image_data
'Should return a dict like: result = {<season number>: {1: '<url 1>', 2: '<url 2>', ...}}'
def _season_posters_dict(self, show_obj, season):
    result = {}

    tvdb_lang = show_obj.lang

    try:
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

        if tvdb_lang and not tvdb_lang == 'en':
            ltvdb_api_parms['language'] = tvdb_lang

        t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
        tvdb_show_obj = t[show_obj.tvdbid]
    except (tvdb_exceptions.tvdb_error, IOError) as e:
        logger.log(u'Unable to look up show on TVDB, not downloading images: ' + ex(e), logger.ERROR)
        return result

    if 'season' not in tvdb_show_obj['_banners'] or 'season' not in tvdb_show_obj['_banners']['season']:
        return result

    seasonsArtObj = tvdb_show_obj['_banners']['season']['season']

    result[season] = {}

    for seasonArtID in seasonsArtObj.keys():
        if int(seasonsArtObj[seasonArtID]['season']) == season and seasonsArtObj[seasonArtID]['language'] == 'en':
            result[season][seasonArtID] = seasonsArtObj[seasonArtID]['_bannerpath']

    return result
'Should return a dict like: result = {<season number>: {1: '<url 1>', 2: '<url 2>', ...}}'
def _season_banners_dict(self, show_obj, season):
    result = {}

    tvdb_lang = show_obj.lang

    try:
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

        if tvdb_lang and not tvdb_lang == 'en':
            ltvdb_api_parms['language'] = tvdb_lang

        t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
        tvdb_show_obj = t[show_obj.tvdbid]
    except (tvdb_exceptions.tvdb_error, IOError) as e:
        logger.log(u'Unable to look up show on TVDB, not downloading images: ' + ex(e), logger.ERROR)
        return result

    if 'season' not in tvdb_show_obj['_banners'] or 'seasonwide' not in tvdb_show_obj['_banners']['season']:
        return result

    seasonsArtObj = tvdb_show_obj['_banners']['season']['seasonwide']

    result[season] = {}

    for seasonArtID in seasonsArtObj.keys():
        if int(seasonsArtObj[seasonArtID]['season']) == season and seasonsArtObj[seasonArtID]['language'] == 'en':
            result[season][seasonArtID] = seasonsArtObj[seasonArtID]['_bannerpath']

    return result
'Used only when mass adding Existing Shows, using previously generated Show metadata to reduce the need to query TVDB.'
def retrieveShowMetadata(self, folder):
    empty_return = (None, None)

    metadata_path = ek.ek(os.path.join, folder, self._show_metadata_filename)

    if not ek.ek(os.path.isdir, folder) or not ek.ek(os.path.isfile, metadata_path):
        logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG)
        return empty_return

    logger.log(u'Loading show info from metadata file in ' + folder, logger.DEBUG)

    try:
        with ek.ek(open, metadata_path, 'r') as xmlFileObj:
            showXML = etree.ElementTree(file=xmlFileObj)

        # the NFO needs at least a <title> and one of <tvdbid>/<id>
        if showXML.findtext('title') == None or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None):
            logger.log(u'Invalid info in tvshow.nfo (missing name or id):' + str(showXML.findtext('title')) + ' ' + str(showXML.findtext('tvdbid')) + ' ' + str(showXML.findtext('id')))
            return empty_return

        name = showXML.findtext('title')

        if showXML.findtext('tvdbid') is not None:
            tvdb_id = int(showXML.findtext('tvdbid'))
        elif showXML.findtext('id'):
            tvdb_id = int(showXML.findtext('id'))
        else:
            logger.log(u'Empty <id> or <tvdbid> field in NFO, unable to find an ID', logger.WARNING)
            return empty_return

        if not tvdb_id:
            logger.log(u'Invalid tvdb id (' + str(tvdb_id) + '), not using metadata file', logger.WARNING)
            return empty_return

    except Exception as e:
        logger.log(u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), logger.WARNING)
        return empty_return

    return (tvdb_id, name)
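For context, the parser above only requires a <title> plus either a <tvdbid> or an <id> element; everything else in an existing NFO is ignored. A minimal sketch of an acceptable file (the values are made up):

    # Illustrative only: the smallest tvshow.nfo that retrieveShowMetadata()
    # would accept. Parsing this file returns (12345, 'Some Show').
    SAMPLE_NFO = """<?xml version="1.0" encoding="UTF-8"?>
    <tvshow>
        <title>Some Show</title>
        <tvdbid>12345</tvdbid>
    </tvshow>
    """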
'Returns a full show dir/metadata/episode.xml path for MediaBrowser episode metadata files ep_obj: a TVEpisode object to get the path for'
def get_episode_file_path(self, ep_obj):
    if ek.ek(os.path.isfile, ep_obj.location):
        xml_file_name = helpers.replaceExtension(ek.ek(os.path.basename, ep_obj.location), self._ep_nfo_extension)
        metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata')
        xml_file_path = ek.ek(os.path.join, metadata_dir_name, xml_file_name)
    else:
        logger.log(u"Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
        return ''

    return xml_file_path
'Returns a full show dir/metadata/episode.jpg path for MediaBrowser episode thumbs. ep_obj: a TVEpisode object to get the path from'
def get_episode_thumb_path(self, ep_obj):
    if ek.ek(os.path.isfile, ep_obj.location):
        tbn_file_name = helpers.replaceExtension(ek.ek(os.path.basename, ep_obj.location), 'jpg')
        metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), 'metadata')
        tbn_file_path = ek.ek(os.path.join, metadata_dir_name, tbn_file_name)
    else:
        return None

    return tbn_file_path
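To make the path construction in these two methods concrete, here is an illustrative trace with a hypothetical episode location, assuming _ep_nfo_extension is 'xml':

    # Illustrative only (hypothetical paths):
    #   ep_obj.location = '/TV/Some Show/Season 1/Some Show - 1x01.mkv'
    #   get_episode_file_path(ep_obj)
    #       -> '/TV/Some Show/Season 1/metadata/Some Show - 1x01.xml'
    #   get_episode_thumb_path(ep_obj)
    #       -> '/TV/Some Show/Season 1/metadata/Some Show - 1x01.jpg'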
'Season thumbs for MediaBrowser go in Show Dir/Season X/folder.jpg If no season folder exists, None is returned'
def get_season_poster_path(self, show_obj, season):
    dir_list = [x for x in ek.ek(os.listdir, show_obj.location)
                if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]

    season_dir_regex = '^Season\\s+(\\d+)$'

    season_dir = None

    for cur_dir in dir_list:
        if season == 0 and cur_dir == 'Specials':
            season_dir = cur_dir
            break

        match = re.match(season_dir_regex, cur_dir, re.I)
        if not match:
            continue

        cur_season = int(match.group(1))

        if cur_season == season:
            season_dir = cur_dir
            break

    if not season_dir:
        logger.log(u'Unable to find a season dir for season ' + str(season), logger.DEBUG)
        return None

    logger.log(u'Using ' + str(season_dir) + '/folder.jpg as season dir for season ' + str(season), logger.DEBUG)

    return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')
'Season thumbs for MediaBrowser go in Show Dir/Season X/banner.jpg If no season folder exists, None is returned'
def get_season_banner_path(self, show_obj, season):
    dir_list = [x for x in ek.ek(os.listdir, show_obj.location)
                if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]

    season_dir_regex = '^Season\\s+(\\d+)$'

    season_dir = None

    for cur_dir in dir_list:
        if season == 0 and cur_dir == 'Specials':
            season_dir = cur_dir
            break

        match = re.match(season_dir_regex, cur_dir, re.I)
        if not match:
            continue

        cur_season = int(match.group(1))

        if cur_season == season:
            season_dir = cur_dir
            break

    if not season_dir:
        logger.log(u'Unable to find a season dir for season ' + str(season), logger.DEBUG)
        return None

    logger.log(u'Using ' + str(season_dir) + '/banner.jpg as season dir for season ' + str(season), logger.DEBUG)

    return ek.ek(os.path.join, show_obj.location, season_dir, 'banner.jpg')
'Creates an elementTree XML structure for a MediaBrowser-style series.xml and returns the resulting data object. show_obj: a TVShow instance to create the NFO for'
def _show_data(self, show_obj):
tvdb_lang = show_obj.lang ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if (tvdb_lang and (not (tvdb_lang == 'en'))): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms) tv_node = etree.Element('Series') try: myShow = t[int(show_obj.tvdbid)] except tvdb_exceptions.tvdb_shownotfound: logger.log(((u'Unable to find show with id ' + str(show_obj.tvdbid)) + ' on tvdb, skipping it'), logger.ERROR) raise except tvdb_exceptions.tvdb_error: logger.log(u"TVDB is down, can't use its data to make the NFO", logger.ERROR) raise try: if ((myShow['seriesname'] is None) or (myShow['seriesname'] == '') or (myShow['id'] is None) or (myShow['id'] == '')): logger.log(((u'Incomplete info for show with id ' + str(show_obj.tvdbid)) + ' on tvdb, skipping it'), logger.ERROR) return False except tvdb_exceptions.tvdb_attributenotfound: logger.log(((u'Incomplete info for show with id ' + str(show_obj.tvdbid)) + ' on tvdb, skipping it'), logger.ERROR) return False tvdbid = etree.SubElement(tv_node, 'id') if (myShow['id'] is not None): tvdbid.text = myShow['id'] SeriesName = etree.SubElement(tv_node, 'SeriesName') if (myShow['seriesname'] is not None): SeriesName.text = myShow['seriesname'] Status = etree.SubElement(tv_node, 'Status') if (myShow['status'] is not None): Status.text = myShow['status'] Network = etree.SubElement(tv_node, 'Network') if (myShow['network'] is not None): Network.text = myShow['network'] Airs_Time = etree.SubElement(tv_node, 'Airs_Time') if (myShow['airs_time'] is not None): Airs_Time.text = myShow['airs_time'] Airs_DayOfWeek = etree.SubElement(tv_node, 'Airs_DayOfWeek') if (myShow['airs_dayofweek'] is not None): Airs_DayOfWeek.text = myShow['airs_dayofweek'] FirstAired = etree.SubElement(tv_node, 'FirstAired') if (myShow['firstaired'] is not None): FirstAired.text = myShow['firstaired'] ContentRating = etree.SubElement(tv_node, 'ContentRating') MPAARating = etree.SubElement(tv_node, 'MPAARating') certification = etree.SubElement(tv_node, 'certification') if (myShow['contentrating'] is not None): ContentRating.text = myShow['contentrating'] MPAARating.text = myShow['contentrating'] certification.text = myShow['contentrating'] MetadataType = etree.SubElement(tv_node, 'Type') MetadataType.text = 'Series' Overview = etree.SubElement(tv_node, 'Overview') if (myShow['overview'] is not None): Overview.text = myShow['overview'] PremiereDate = etree.SubElement(tv_node, 'PremiereDate') if (myShow['firstaired'] is not None): PremiereDate.text = myShow['firstaired'] Rating = etree.SubElement(tv_node, 'Rating') if (myShow['rating'] is not None): Rating.text = myShow['rating'] ProductionYear = etree.SubElement(tv_node, 'ProductionYear') if (myShow['firstaired'] is not None): try: year_text = str(datetime.datetime.strptime(myShow['firstaired'], '%Y-%m-%d').year) if year_text: ProductionYear.text = year_text except: pass RunningTime = etree.SubElement(tv_node, 'RunningTime') Runtime = etree.SubElement(tv_node, 'Runtime') if (myShow['runtime'] is not None): RunningTime.text = myShow['runtime'] Runtime.text = myShow['runtime'] IMDB_ID = etree.SubElement(tv_node, 'IMDB_ID') IMDB = etree.SubElement(tv_node, 'IMDB') IMDbId = etree.SubElement(tv_node, 'IMDbId') if (myShow['imdb_id'] is not None): IMDB_ID.text = myShow['imdb_id'] IMDB.text = myShow['imdb_id'] IMDbId.text = myShow['imdb_id'] Zap2ItId = etree.SubElement(tv_node, 'Zap2ItId') if (myShow['zap2it_id'] is not None): Zap2ItId.text = myShow['zap2it_id'] Genres = etree.SubElement(tv_node, 'Genres') if (myShow['genre'] 
is not None): for genre in myShow['genre'].split('|'): if (genre and genre.strip()): cur_genre = etree.SubElement(Genres, 'Genre') cur_genre.text = genre.strip() Genre = etree.SubElement(tv_node, 'Genre') if (myShow['genre'] is not None): Genre.text = '|'.join([x.strip() for x in myShow['genre'].split('|') if (x and x.strip())]) Studios = etree.SubElement(tv_node, 'Studios') Studio = etree.SubElement(Studios, 'Studio') if (myShow['network'] is not None): Studio.text = myShow['network'] Persons = etree.SubElement(tv_node, 'Persons') if (myShow['_actors'] is not None): for actor in myShow['_actors']: cur_actor_name_text = actor['name'] if ((cur_actor_name_text is not None) and cur_actor_name_text.strip()): cur_actor = etree.SubElement(Persons, 'Person') cur_actor_name = etree.SubElement(cur_actor, 'Name') cur_actor_name.text = cur_actor_name_text.strip() cur_actor_type = etree.SubElement(cur_actor, 'Type') cur_actor_type.text = 'Actor' cur_actor_role = etree.SubElement(cur_actor, 'Role') cur_actor_role_text = actor['role'] if (cur_actor_role_text is not None): cur_actor_role.text = cur_actor_role_text helpers.indentXML(tv_node) data = etree.ElementTree(tv_node) return data
'Creates an elementTree XML structure for a MediaBrowser-style episode.xml and returns the resulting data object. ep_obj: a TVEpisode instance to create the NFO for'
def _ep_data(self, ep_obj):
eps_to_write = ([ep_obj] + ep_obj.relatedEps) persons_dict = {} persons_dict['Director'] = [] persons_dict['GuestStar'] = [] persons_dict['Writer'] = [] tvdb_lang = ep_obj.show.lang try: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if (tvdb_lang and (not (tvdb_lang == 'en'))): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms) myShow = t[ep_obj.show.tvdbid] except tvdb_exceptions.tvdb_shownotfound as e: raise exceptions.ShowNotFoundException(e.message) except tvdb_exceptions.tvdb_error as e: logger.log((u'Unable to connect to TVDB while creating meta files - skipping - ' + ex(e)), logger.ERROR) return False rootNode = etree.Element('Item') for curEpToWrite in eps_to_write: try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound): logger.log(((((u'Unable to find episode ' + str(curEpToWrite.season)) + 'x') + str(curEpToWrite.episode)) + ' on tvdb... has it been removed? Should I delete from db?')) return None if (curEpToWrite == ep_obj): if ((myEp['firstaired'] is None) and (ep_obj.season == 0)): myEp['firstaired'] = str(datetime.date.fromordinal(1)) if ((myEp['episodename'] is None) or (myEp['firstaired'] is None)): return None episode = rootNode EpisodeName = etree.SubElement(episode, 'EpisodeName') if (curEpToWrite.name is not None): EpisodeName.text = curEpToWrite.name else: EpisodeName.text = '' EpisodeNumber = etree.SubElement(episode, 'EpisodeNumber') EpisodeNumber.text = str(ep_obj.episode) if ep_obj.relatedEps: EpisodeNumberEnd = etree.SubElement(episode, 'EpisodeNumberEnd') EpisodeNumberEnd.text = str(curEpToWrite.episode) SeasonNumber = etree.SubElement(episode, 'SeasonNumber') SeasonNumber.text = str(curEpToWrite.season) if (not ep_obj.relatedEps): absolute_number = etree.SubElement(episode, 'absolute_number') absolute_number.text = myEp['absolute_number'] FirstAired = etree.SubElement(episode, 'FirstAired') if (curEpToWrite.airdate != datetime.date.fromordinal(1)): FirstAired.text = str(curEpToWrite.airdate) else: FirstAired.text = '' MetadataType = etree.SubElement(episode, 'Type') MetadataType.text = 'Episode' Overview = etree.SubElement(episode, 'Overview') if (curEpToWrite.description is not None): Overview.text = curEpToWrite.description else: Overview.text = '' if (not ep_obj.relatedEps): Rating = etree.SubElement(episode, 'Rating') rating_text = myEp['rating'] if (rating_text is not None): Rating.text = rating_text IMDB_ID = etree.SubElement(episode, 'IMDB_ID') IMDB = etree.SubElement(episode, 'IMDB') IMDbId = etree.SubElement(episode, 'IMDbId') if (myShow['imdb_id'] is not None): IMDB_ID.text = myShow['imdb_id'] IMDB.text = myShow['imdb_id'] IMDbId.text = myShow['imdb_id'] TvDbId = etree.SubElement(episode, 'TvDbId') TvDbId.text = str(curEpToWrite.tvdbid) Persons = etree.SubElement(episode, 'Persons') Language = etree.SubElement(episode, 'Language') Language.text = myEp['language'] thumb = etree.SubElement(episode, 'filename') thumb_text = self.get_episode_thumb_path(ep_obj) if thumb_text: thumb.text = thumb_text else: EpisodeNumberEnd.text = str(curEpToWrite.episode) if curEpToWrite.name: if (not EpisodeName.text): EpisodeName.text = curEpToWrite.name else: EpisodeName.text = ((EpisodeName.text + ', ') + curEpToWrite.name) if curEpToWrite.description: if (not Overview.text): Overview.text = curEpToWrite.description else: Overview.text = ((Overview.text + '\r') + curEpToWrite.description) if myEp['director']: persons_dict['Director'] += 
[x.strip() for x in myEp['director'].split('|') if (x and x.strip())] if myEp['gueststars']: persons_dict['GuestStar'] += [x.strip() for x in myEp['gueststars'].split('|') if (x and x.strip())] if myEp['writer']: persons_dict['Writer'] += [x.strip() for x in myEp['writer'].split('|') if (x and x.strip())] for (person_type, names) in persons_dict.iteritems(): names = list(set(names)) for cur_name in names: Person = etree.SubElement(Persons, 'Person') cur_person_name = etree.SubElement(Person, 'Name') cur_person_name.text = cur_name cur_person_type = etree.SubElement(Person, 'Type') cur_person_type.text = person_type helpers.indentXML(rootNode) data = etree.ElementTree(rootNode) return data
'Creates an elementTree XML structure for a MediaBrowser-style series.xml and returns the resulting data object. show_obj: a TVShow instance to create the NFO for'
def _show_data(self, show_obj):
tvdb_lang = show_obj.lang ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if (tvdb_lang and (not (tvdb_lang == 'en'))): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms) rootNode = etree.Element('details') tv_node = etree.SubElement(rootNode, 'movie') tv_node.attrib['isExtra'] = 'false' tv_node.attrib['isSet'] = 'false' tv_node.attrib['isTV'] = 'true' try: myShow = t[int(show_obj.tvdbid)] except tvdb_exceptions.tvdb_shownotfound: logger.log(((u'Unable to find show with id ' + str(show_obj.tvdbid)) + ' on tvdb, skipping it'), logger.ERROR) raise except tvdb_exceptions.tvdb_error: logger.log(u"TVDB is down, can't use its data to make the NFO", logger.ERROR) raise try: if ((myShow['seriesname'] is None) or (myShow['seriesname'] == '') or (myShow['id'] is None) or (myShow['id'] == '')): logger.log(((u'Incomplete info for show with id ' + str(show_obj.tvdbid)) + ' on tvdb, skipping it'), logger.ERROR) return False except tvdb_exceptions.tvdb_attributenotfound: logger.log(((u'Incomplete info for show with id ' + str(show_obj.tvdbid)) + ' on tvdb, skipping it'), logger.ERROR) return False SeriesName = etree.SubElement(tv_node, 'title') if (myShow['seriesname'] is not None): SeriesName.text = myShow['seriesname'] else: SeriesName.text = '' Genres = etree.SubElement(tv_node, 'genres') if (myShow['genre'] is not None): for genre in myShow['genre'].split('|'): if (genre and genre.strip()): cur_genre = etree.SubElement(Genres, 'genre') cur_genre.text = genre.strip() FirstAired = etree.SubElement(tv_node, 'premiered') if (myShow['firstaired'] is not None): FirstAired.text = myShow['firstaired'] year = etree.SubElement(tv_node, 'year') if (myShow['firstaired'] is not None): try: year_text = str(datetime.datetime.strptime(myShow['firstaired'], '%Y-%m-%d').year) if year_text: year.text = year_text except: pass if (myShow['rating'] is not None): try: rating = int((float(myShow['rating']) * 10)) except ValueError: rating = 0 Rating = etree.SubElement(tv_node, 'rating') rating_text = str(rating) if (rating_text is not None): Rating.text = rating_text Status = etree.SubElement(tv_node, 'status') if (myShow['status'] is not None): Status.text = myShow['status'] mpaa = etree.SubElement(tv_node, 'mpaa') if (myShow['contentrating'] is not None): mpaa.text = myShow['contentrating'] if (myShow['imdb_id'] is not None): IMDB_ID = etree.SubElement(tv_node, 'id') IMDB_ID.attrib['moviedb'] = 'imdb' IMDB_ID.text = myShow['imdb_id'] if (myShow['zap2it_id'] is not None): zap2it_id = etree.SubElement(tv_node, 'id') zap2it_id.attrib['moviedb'] = 'zap2it' zap2it_id.text = myShow['zap2it_id'] tvdbid = etree.SubElement(tv_node, 'tvdbid') if (myShow['id'] is not None): tvdbid.text = myShow['id'] Runtime = etree.SubElement(tv_node, 'runtime') if (myShow['runtime'] is not None): Runtime.text = myShow['runtime'] cast = etree.SubElement(tv_node, 'cast') if (myShow['_actors'] is not None): for actor in myShow['_actors']: cur_actor_name_text = actor['name'] if ((cur_actor_name_text is not None) and cur_actor_name_text.strip()): cur_actor = etree.SubElement(cast, 'actor') cur_actor.text = cur_actor_name_text.strip() helpers.indentXML(rootNode) data = etree.ElementTree(rootNode) return data
'Creates an elementTree XML structure for a MediaBrowser-style episode.xml and returns the resulting data object. ep_obj: a TVEpisode instance to create the NFO for'
def _ep_data(self, ep_obj):
eps_to_write = ([ep_obj] + ep_obj.relatedEps) tvdb_lang = ep_obj.show.lang try: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if (tvdb_lang and (not (tvdb_lang == 'en'))): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms) myShow = t[ep_obj.show.tvdbid] except tvdb_exceptions.tvdb_shownotfound as e: raise exceptions.ShowNotFoundException(e.message) except tvdb_exceptions.tvdb_error as e: logger.log((u'Unable to connect to TVDB while creating meta files - skipping - ' + ex(e)), logger.ERROR) return False rootNode = etree.Element('details') movie = etree.SubElement(rootNode, 'movie') movie.attrib['isExtra'] = 'false' movie.attrib['isSet'] = 'false' movie.attrib['isTV'] = 'true' for curEpToWrite in eps_to_write: try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound): logger.log(((((u'Unable to find episode ' + str(curEpToWrite.season)) + 'x') + str(curEpToWrite.episode)) + ' on tvdb... has it been removed? Should I delete from db?')) return None if (curEpToWrite == ep_obj): if ((myEp['firstaired'] is None) and (ep_obj.season == 0)): myEp['firstaired'] = str(datetime.date.fromordinal(1)) if ((myEp['episodename'] is None) or (myEp['firstaired'] is None)): return None episode = movie EpisodeName = etree.SubElement(episode, 'title') if (curEpToWrite.name is not None): EpisodeName.text = curEpToWrite.name else: EpisodeName.text = '' SeasonNumber = etree.SubElement(episode, 'season') SeasonNumber.text = str(curEpToWrite.season) EpisodeNumber = etree.SubElement(episode, 'episode') EpisodeNumber.text = str(ep_obj.episode) year = etree.SubElement(episode, 'year') if (myShow['firstaired'] is not None): try: year_text = str(datetime.datetime.strptime(myShow['firstaired'], '%Y-%m-%d').year) if year_text: year.text = year_text except: pass plot = etree.SubElement(episode, 'plot') if (myShow['overview'] is not None): plot.text = myShow['overview'] Overview = etree.SubElement(episode, 'episodeplot') if (curEpToWrite.description is not None): Overview.text = curEpToWrite.description else: Overview.text = '' mpaa = etree.SubElement(episode, 'mpaa') if (myShow['contentrating'] is not None): mpaa.text = myShow['contentrating'] if (not ep_obj.relatedEps): if (myEp['rating'] is not None): try: rating = int((float(myEp['rating']) * 10)) except ValueError: rating = 0 Rating = etree.SubElement(episode, 'rating') rating_text = str(rating) if (rating_text is not None): Rating.text = rating_text director = etree.SubElement(episode, 'director') director_text = myEp['director'] if (director_text is not None): director.text = director_text credits = etree.SubElement(episode, 'credits') credits_text = myEp['writer'] if (credits_text is not None): credits.text = credits_text cast = etree.SubElement(episode, 'cast') if (myShow['_actors'] is not None): for actor in myShow['_actors']: cur_actor_name_text = actor['name'] if ((cur_actor_name_text is not None) and cur_actor_name_text.strip()): cur_actor = etree.SubElement(cast, 'actor') cur_actor.text = cur_actor_name_text.strip() else: if curEpToWrite.name: if (not EpisodeName.text): EpisodeName.text = curEpToWrite.name else: EpisodeName.text = ((EpisodeName.text + ', ') + curEpToWrite.name) if curEpToWrite.description: if (not Overview.text): Overview.text = curEpToWrite.description else: Overview.text = ((Overview.text + '\r') + curEpToWrite.description) helpers.indentXML(rootNode) data = etree.ElementTree(rootNode) return data
'Returns the path where the episode thumbnail should be stored. Defaults to the same path as the episode file but with a .metathumb extension. ep_obj: a TVEpisode instance for which to create the thumbnail'
def get_episode_thumb_path(self, ep_obj):
    if ek.ek(os.path.isfile, ep_obj.location):
        tbn_filename = helpers.replaceExtension(ep_obj.location, 'metathumb')
    else:
        return None

    return tbn_filename
'Season thumbs for WDTV go in Show Dir/Season X/folder.jpg If no season folder exists, None is returned'
def get_season_poster_path(self, show_obj, season):
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))] season_dir_regex = '^Season\\s+(\\d+)$' season_dir = None for cur_dir in dir_list: if ((season == 0) and (cur_dir == 'Specials')): season_dir = cur_dir break match = re.match(season_dir_regex, cur_dir, re.I) if (not match): continue cur_season = int(match.group(1)) if (cur_season == season): season_dir = cur_dir break if (not season_dir): logger.log((u'Unable to find a season dir for season ' + str(season)), logger.DEBUG) return None logger.log((((u'Using ' + str(season_dir)) + '/folder.jpg as season dir for season ') + str(season)), logger.DEBUG) return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')
'Creates an elementTree XML structure for a WDTV-style episode.xml and returns the resulting data object. ep_obj: a TVEpisode instance to create the NFO for'
def _ep_data(self, ep_obj):
eps_to_write = ([ep_obj] + ep_obj.relatedEps) tvdb_lang = ep_obj.show.lang try: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if (tvdb_lang and (not (tvdb_lang == 'en'))): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms) myShow = t[ep_obj.show.tvdbid] except tvdb_exceptions.tvdb_shownotfound as e: raise exceptions.ShowNotFoundException(e.message) except tvdb_exceptions.tvdb_error as e: logger.log((u'Unable to connect to TVDB while creating meta files - skipping - ' + ex(e)), logger.ERROR) return False rootNode = etree.Element('details') for curEpToWrite in eps_to_write: try: myEp = myShow[curEpToWrite.season][curEpToWrite.episode] except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound): logger.log(((((u'Unable to find episode ' + str(curEpToWrite.season)) + 'x') + str(curEpToWrite.episode)) + ' on tvdb... has it been removed? Should I delete from db?')) return None if ((myEp['firstaired'] is None) and (ep_obj.season == 0)): myEp['firstaired'] = str(datetime.date.fromordinal(1)) if ((myEp['episodename'] is None) or (myEp['firstaired'] is None)): return None if (len(eps_to_write) > 1): episode = etree.SubElement(rootNode, 'details') else: episode = rootNode episodeID = etree.SubElement(episode, 'id') episodeID.text = str(curEpToWrite.tvdbid) title = etree.SubElement(episode, 'title') title.text = ep_obj.prettyName() seriesName = etree.SubElement(episode, 'series_name') if (myShow['seriesname'] is not None): seriesName.text = myShow['seriesname'] episodeName = etree.SubElement(episode, 'episode_name') if (curEpToWrite.name is not None): episodeName.text = curEpToWrite.name seasonNumber = etree.SubElement(episode, 'season_number') seasonNumber.text = str(curEpToWrite.season) episodeNum = etree.SubElement(episode, 'episode_number') episodeNum.text = str(curEpToWrite.episode) firstAired = etree.SubElement(episode, 'firstaired') if (curEpToWrite.airdate != datetime.date.fromordinal(1)): firstAired.text = str(curEpToWrite.airdate) year = etree.SubElement(episode, 'year') if (myShow['firstaired'] is not None): try: year_text = str(datetime.datetime.strptime(myShow['firstaired'], '%Y-%m-%d').year) if year_text: year.text = year_text except: pass runtime = etree.SubElement(episode, 'runtime') if (curEpToWrite.season != 0): if (myShow['runtime'] is not None): runtime.text = myShow['runtime'] genre = etree.SubElement(episode, 'genre') if (myShow['genre'] is not None): genre.text = ' / '.join([x.strip() for x in myShow['genre'].split('|') if (x and x.strip())]) director = etree.SubElement(episode, 'director') director_text = myEp['director'] if (director_text is not None): director.text = director_text if (myShow['_actors'] is not None): for actor in myShow['_actors']: cur_actor_name_text = actor['name'] if ((cur_actor_name_text is not None) and cur_actor_name_text.strip()): cur_actor = etree.SubElement(episode, 'actor') cur_actor_name = etree.SubElement(cur_actor, 'name') cur_actor_name.text = cur_actor_name_text.strip() cur_actor_role = etree.SubElement(cur_actor, 'role') cur_actor_role_text = actor['role'] if (cur_actor_role_text is not None): cur_actor_role.text = cur_actor_role_text overview = etree.SubElement(episode, 'overview') if (curEpToWrite.description is not None): overview.text = curEpToWrite.description helpers.indentXML(rootNode) data = etree.ElementTree(rootNode) return data
'Initializes a config migrator that can take the config from the version indicated in the config file up to the version required by SB'
def __init__(self, config_obj):
    self.config_obj = config_obj

    self.config_version = check_setting_int(config_obj, 'General', 'config_version', sickbeard.CONFIG_VERSION)
    self.expected_config_version = sickbeard.CONFIG_VERSION

    self.migration_names = {1: 'Custom naming',
                            2: 'Sync backup number with version number',
                            3: 'Rename omgwtfnzb variables',
                            4: 'Add newznab catIDs',
                            5: 'Metadata update'}
'Calls each successive migration until the config is the same version as SB expects'
def migrate_config(self):
    if self.config_version > self.expected_config_version:
        logger.log_error_and_exit(u'Your config version (' + str(self.config_version) + ') has been incremented past what this version of Sick Beard supports (' + str(self.expected_config_version) + ').\n' + 'If you have used other forks or a newer version of Sick Beard, your config file may be unusable due to their modifications.')

    sickbeard.CONFIG_VERSION = self.config_version

    while self.config_version < self.expected_config_version:
        next_version = self.config_version + 1

        if next_version in self.migration_names:
            migration_name = ': ' + self.migration_names[next_version]
        else:
            migration_name = ''

        logger.log(u'Backing up config before upgrade')
        if not helpers.backupVersionedFile(sickbeard.CONFIG_FILE, self.config_version):
            logger.log_error_and_exit(u'Config backup failed, abort upgrading config')
        else:
            logger.log(u'Proceeding with upgrade')

        logger.log(u'Migrating config up to version ' + str(next_version) + migration_name)
        getattr(self, '_migrate_v' + str(next_version))()
        self.config_version = next_version

        sickbeard.CONFIG_VERSION = self.config_version
        logger.log(u'Saving config file to disk')
        sickbeard.save_config()
'Reads in the old naming settings from your config and generates a new config template from them.'
def _migrate_v1(self):
    sickbeard.NAMING_PATTERN = self._name_to_pattern()
    logger.log(u"Based on your old settings I'm setting your new naming pattern to: " + sickbeard.NAMING_PATTERN)

    sickbeard.NAMING_CUSTOM_ABD = bool(check_setting_int(self.config_obj, 'General', 'naming_dates', 0))

    if sickbeard.NAMING_CUSTOM_ABD:
        sickbeard.NAMING_ABD_PATTERN = self._name_to_pattern(True)
        logger.log(u'Adding a custom air-by-date naming pattern to your config: ' + sickbeard.NAMING_ABD_PATTERN)
    else:
        sickbeard.NAMING_ABD_PATTERN = naming.name_abd_presets[0]

    sickbeard.NAMING_MULTI_EP = int(check_setting_int(self.config_obj, 'General', 'naming_multi_ep_type', 1))

    myDB = db.DBConnection()
    season_folder_shows = myDB.select('SELECT * FROM tv_shows WHERE flatten_folders = 0')

    if season_folder_shows:
        old_season_format = check_setting_str(self.config_obj, 'General', 'season_folders_format', 'Season %02d')

        if old_season_format:
            try:
                new_season_format = old_season_format % 9
                new_season_format = new_season_format.replace('09', '%0S')
                new_season_format = new_season_format.replace('9', '%S')

                logger.log(u'Changed season folder format from ' + old_season_format + ' to ' + new_season_format + ', prepending it to your naming config')
                sickbeard.NAMING_PATTERN = new_season_format + os.sep + sickbeard.NAMING_PATTERN
            except (TypeError, ValueError):
                logger.log(u"Can't change " + old_season_format + ' to new season format', logger.ERROR)
    else:
        logger.log(u"No shows were using season folders before so I'm disabling flattening on all shows")
        myDB.action('UPDATE tv_shows SET flatten_folders = 0')

    sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
'Reads in the old omgwtfnzbs settings from your config and maps them onto the renamed variables.'
def _migrate_v3(self):
    sickbeard.OMGWTFNZBS_USERNAME = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
    sickbeard.OMGWTFNZBS_APIKEY = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')
'Update newznab providers so that the category IDs can be set independently via the config'
def _migrate_v4(self):
    new_newznab_data = []
    old_newznab_data = check_setting_str(self.config_obj, 'Newznab', 'newznab_data', '')

    if old_newznab_data:
        old_newznab_data_list = old_newznab_data.split('!!!')

        for cur_provider_data in old_newznab_data_list:
            try:
                (name, url, key, enabled) = cur_provider_data.split('|')
            except ValueError:
                logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format", logger.ERROR)
                continue

            if name == 'Sick Beard Index':
                key = '0'

            if name == 'NZBs.org':
                catIDs = '5030,5040,5070,5090'
            else:
                catIDs = '5030,5040'

            cur_provider_data_list = [name, url, key, catIDs, enabled]
            new_newznab_data.append('|'.join(cur_provider_data_list))

        sickbeard.NEWZNAB_DATA = '!!!'.join(new_newznab_data)
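A concrete illustration of the string transformation above; the provider names and URLs other than the special-cased 'NZBs.org' are made up:

    # Illustrative only: each '!!!'-separated provider string gains a catIDs field.
    old = 'NZBs.org|https://nzbs.org/|somekey|1!!!Some Indexer|https://example.com/|otherkey|0'
    # after _migrate_v4:
    new = 'NZBs.org|https://nzbs.org/|somekey|5030,5040,5070,5090|1!!!Some Indexer|https://example.com/|otherkey|5030,5040|0'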
'Updates metadata values to the new format'
def _migrate_v5(self):
' Quick overview of what the upgrade does:\n\n new | old | description (new)\n ----+-----+--------------------\n 1 | 1 | show metadata\n 2 | 2 | episode metadata\n 3 | 4 | show fanart\n 4 | 3 | show poster\n 5 | - | show banner\n 6 | 5 | episode thumb\n 7 | 6 | season poster\n 8 | - | season banner\n 9 | - | season all poster\n 10 | - | season all banner\n\n Note that the ini places start at 1 while the list index starts at 0.\n old format: 0|0|0|0|0|0 -- 6 places\n new format: 0|0|0|0|0|0|0|0|0|0 -- 10 places\n\n Drop the use of use_banner option.\n Migrate the poster override to just using the banner option (applies to xbmc only).\n ' metadata_xbmc = check_setting_str(self.config_obj, 'General', 'metadata_xbmc', '0|0|0|0|0|0') metadata_xbmc_12plus = check_setting_str(self.config_obj, 'General', 'metadata_xbmc_12plus', '0|0|0|0|0|0') metadata_mediabrowser = check_setting_str(self.config_obj, 'General', 'metadata_mediabrowser', '0|0|0|0|0|0') metadata_ps3 = check_setting_str(self.config_obj, 'General', 'metadata_ps3', '0|0|0|0|0|0') metadata_wdtv = check_setting_str(self.config_obj, 'General', 'metadata_wdtv', '0|0|0|0|0|0') metadata_tivo = check_setting_str(self.config_obj, 'General', 'metadata_tivo', '0|0|0|0|0|0') metadata_mede8er = check_setting_str(self.config_obj, 'General', 'metadata_mede8er', '0|0|0|0|0|0') use_banner = bool(check_setting_int(self.config_obj, 'General', 'use_banner', 0)) def _migrate_metadata(metadata, metadata_name, use_banner): cur_metadata = metadata.split('|') if (len(cur_metadata) == 6): logger.log((((u'Upgrading ' + metadata_name) + ' metadata, old value: ') + metadata)) cur_metadata.insert(4, '0') cur_metadata.append('0') cur_metadata.append('0') cur_metadata.append('0') (cur_metadata[3], cur_metadata[2]) = (cur_metadata[2], cur_metadata[3]) if ((metadata_name == 'XBMC') and use_banner): (cur_metadata[4], cur_metadata[3]) = (cur_metadata[3], '0') metadata = '|'.join(cur_metadata) logger.log((((u'Upgrading ' + metadata_name) + ' metadata, new value: ') + metadata)) elif (len(cur_metadata) == 10): metadata = '|'.join(cur_metadata) logger.log((((u'Keeping ' + metadata_name) + ' metadata, value: ') + metadata)) else: logger.log(((((u'Skipping ' + metadata_name) + " metadata: '") + metadata) + "', incorrect format"), logger.ERROR) metadata = '0|0|0|0|0|0|0|0|0|0' logger.log((((u'Setting ' + metadata_name) + ' metadata, new value: ') + metadata)) return metadata sickbeard.METADATA_XBMC = _migrate_metadata(metadata_xbmc, 'XBMC', use_banner) sickbeard.METADATA_XBMC_12PLUS = _migrate_metadata(metadata_xbmc_12plus, 'XBMC 12+', use_banner) sickbeard.METADATA_MEDIABROWSER = _migrate_metadata(metadata_mediabrowser, 'MediaBrowser', use_banner) sickbeard.METADATA_PS3 = _migrate_metadata(metadata_ps3, 'PS3', use_banner) sickbeard.METADATA_WDTV = _migrate_metadata(metadata_wdtv, 'WDTV', use_banner) sickbeard.METADATA_TIVO = _migrate_metadata(metadata_tivo, 'TIVO', use_banner) sickbeard.METADATA_MEDE8ER = _migrate_metadata(metadata_mede8er, 'Mede8er', use_banner)
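A worked example of the _migrate_metadata helper above, following the old-to-new mapping in its overview table; the chosen flag values are arbitrary:

    # Illustrative only: old 6-place value with show/episode metadata, fanart and
    # season posters enabled, but no show poster or episode thumbs.
    old = '1|1|0|1|0|1'    # show meta | ep meta | poster | fanart | ep thumb | season poster
    # _migrate_metadata(old, 'MediaBrowser', use_banner=False) yields:
    new = '1|1|1|0|0|0|1|0|0|0'
    # show meta | ep meta | fanart | poster | banner | ep thumb | season poster |
    # season banner | season all poster | season all banner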
'Updates the Synology notifier settings to reflect that there is now a separate update-library option, instead of misusing the enable option'
def _migrate_v6(self):
sickbeard.SYNOINDEX_UPDATE_LIBRARY = bool(check_setting_int(self.config_obj, 'Synology', 'use_synoindex', 0))
'Returns the XBMC JSON-RPC API version (odd # = dev, even # = stable). Sends a request to the XBMC host using JSON-RPC to determine whether the legacy API or the JSON-RPC API functions should be used, falling back to testing the legacy HTTPAPI before assuming it is just a badly configured host. Args: host: XBMC webserver host:port; username: XBMC webserver username; password: XBMC webserver password. Returns: the API number or False. Known values: API 2 = v10 (Dharma), 3 = pre-Eden, 4 = v11 (Eden), 5 = pre-Frodo, 6 = v12 (Frodo) / v13 (Gotham)'
def _get_xbmc_version(self, host, username, password):
    # use a short socket timeout just for this probe
    socket.setdefaulttimeout(10)

    checkCommand = '{"jsonrpc":"2.0","method":"JSONRPC.Version","id":1}'
    result = self._send_to_xbmc_json(checkCommand, host, username, password)

    # revert back to the configured socket timeout
    socket.setdefaulttimeout(sickbeard.SOCKET_TIMEOUT)

    if result:
        return result['result']['version']
    else:
        # fall back to probing the legacy HTTP API
        testCommand = {'command': 'Help'}
        request = self._send_to_xbmc(testCommand, host, username, password)
        if request:
            # report a low version number so the legacy code path is used
            return 1
        else:
            return False
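For reference, the JSON-RPC probe above is just a version query; a sketch of the kind of exchange it relies on follows. The response shape is illustrative: older XBMC builds reported a plain integer version here, while newer ones report a structured version object.

    # Illustrative only: what _get_xbmc_version() sends and the sort of reply it
    # indexes with result['result']['version'].
    request = '{"jsonrpc":"2.0","method":"JSONRPC.Version","id":1}'
    # e.g. an Eden-era (API 4) host might answer:
    reply = {'jsonrpc': '2.0', 'id': 1, 'result': {'version': 4}}
    reply['result']['version']   # -> 4, which selects the legacy HTTP API branch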
'Internal wrapper for the notify_snatch and notify_download functions. Detects the JSON-RPC version, then branches the logic for either the JSON-RPC or legacy HTTP API methods. Args: message: Message body of the notice to send; title: Title of the notice to send; host: XBMC webserver host:port; username: XBMC webserver username; password: XBMC webserver password; force: Used for the Test method to override config safety checks. Returns: a list of results in the format host:ip:result, where each result is either 'OK' or False, to be parsed by the calling function.'
def _notify(self, message, title='Sick Beard', host=None, username=None, password=None, force=False):
if ((not sickbeard.USE_XBMC) and (not force)): return False if (not host): host = sickbeard.XBMC_HOST if (not username): username = sickbeard.XBMC_USERNAME if (not password): password = sickbeard.XBMC_PASSWORD result = '' for curHost in [x.strip() for x in host.split(',')]: logger.log((((u"XBMC: Sending XBMC notification to '" + curHost) + "' - ") + message), logger.MESSAGE) xbmcapi = self._get_xbmc_version(curHost, username, password) if xbmcapi: if (xbmcapi <= 4): logger.log(u'XBMC: Detected XBMC version <= 11, using XBMC HTTP API', logger.DEBUG) command = {'command': 'ExecBuiltIn', 'parameter': (((('Notification(' + title.encode('utf-8')) + ',') + message.encode('utf-8')) + ')')} notifyResult = self._send_to_xbmc(command, curHost, username, password) if notifyResult: result += ((curHost + ':') + str(notifyResult)) else: logger.log(u'XBMC: Detected XBMC version >= 12, using XBMC JSON API', logger.DEBUG) command = ('{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (title.encode('utf-8'), message.encode('utf-8'), self.sb_logo_url)) notifyResult = self._send_to_xbmc_json(command, curHost, username, password) if notifyResult: result += ((curHost + ':') + notifyResult['result'].decode(sickbeard.SYS_ENCODING)) else: if (sickbeard.XBMC_ALWAYS_ON or force): logger.log(((u"XBMC: Failed to detect XBMC version for '" + curHost) + "', check configuration and try again."), logger.ERROR) result += (curHost + ':False') return result
'Handles communication to XBMC servers via the HTTP API. Args: command: Dictionary of field/data pairs, encoded via urllib and passed to the XBMC API via HTTP; host: XBMC webserver host:port; username: XBMC webserver username; password: XBMC webserver password. Returns: response.result for successful commands, or False if there was an error'
def _send_to_xbmc(self, command, host=None, username=None, password=None):
if (not username): username = sickbeard.XBMC_USERNAME if (not password): password = sickbeard.XBMC_PASSWORD if (not host): logger.log(u'XBMC: No host specified, check your settings', logger.DEBUG) return False for key in command: if (type(command[key]) == unicode): command[key] = command[key].encode('utf-8') enc_command = urllib.urlencode(command) logger.log((u'XBMC: Encoded API command: ' + enc_command), logger.DEBUG) url = ('http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)) try: req = urllib2.Request(url) if password: pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() pw_mgr.add_password(None, url, username, password) else: pw_mgr = None result = sickbeard.helpers.getURL(req, password_mgr=pw_mgr, throw_exc=True) logger.log((u'XBMC: HTTP response: ' + result.replace('\n', '')), logger.DEBUG) return result except (urllib2.URLError, IOError) as e: logger.log((((u'XBMC: Could not contact XBMC HTTP at ' + fixStupidEncodings(url)) + ' ') + ex(e)), logger.WARNING) except Exception as e: logger.log((((u'XBMC: Exception occurred while trying to access ' + fixStupidEncodings(url)) + ' ') + ex(e)), logger.WARNING) return False
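To illustrate what _send_to_xbmc() actually builds, here is the kind of legacy HTTP API call assembled from a notification command (the host and message text are made up, and the order of the two query parameters may differ since the dict is unordered):

    # Illustrative only: a command dict like the one _notify() passes in,
    # url-encoded onto the legacy xbmcCmds endpoint.
    command = {'command': 'ExecBuiltIn',
               'parameter': 'Notification(Sick Beard,Download finished)'}
    # resulting request URL (hypothetical host):
    #   http://192.168.1.10:8080/xbmcCmds/xbmcHttp/?command=ExecBuiltIn&parameter=Notification%28Sick+Beard%2CDownload+finished%29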
'Handles updating an XBMC host via the HTTP API. Attempts to update the XBMC video library for a specific tv show if passed, otherwise updates the whole library if enabled. Args: host: XBMC webserver host:port; showName: Name of a TV show to specifically target the library update for. Returns: True or False'
def _update_library(self, host=None, showName=None):
if (not host): logger.log(u'XBMC: No host specified, check your settings', logger.DEBUG) return False if showName: logger.log((u'XBMC: Updating library via HTTP method for show ' + showName), logger.MESSAGE) pathSql = ('select path.strPath from path, tvshow, tvshowlinkpath where tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow and tvshowlinkpath.idPath = path.idPath' % showName) xmlCommand = {'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'} sqlCommand = {'command': ('QueryVideoDatabase(%s)' % pathSql)} resetCommand = {'command': 'SetResponseFormat()'} request = self._send_to_xbmc(xmlCommand, host) if (not request): return False sqlXML = self._send_to_xbmc(sqlCommand, host) request = self._send_to_xbmc(resetCommand, host) if (not sqlXML): logger.log((((u'XBMC: Invalid response for ' + showName) + ' on ') + host), logger.DEBUG) return False encSqlXML = urllib.quote(sqlXML, ':\\/<>') try: et = etree.fromstring(encSqlXML) except SyntaxError as e: logger.log((u'XBMC: Unable to parse XML returned from XBMC: ' + ex(e)), logger.ERROR) return False paths = et.findall('.//field') if (not paths): logger.log((((u'XBMC: No valid paths found for ' + showName) + ' on ') + host), logger.DEBUG) return False for path in paths: unEncPath = urllib.unquote(path.text).decode(sickbeard.SYS_ENCODING) logger.log((((((u'XBMC: Updating ' + showName) + ' on ') + host) + ' at ') + unEncPath), logger.MESSAGE) updateCommand = {'command': 'ExecBuiltIn', 'parameter': ('XBMC.updatelibrary(video, %s)' % unEncPath)} request = self._send_to_xbmc(updateCommand, host) if (not request): logger.log((((((u'XBMC: Update of show directory failed on ' + showName) + ' on ') + host) + ' at ') + unEncPath), logger.WARNING) return False if (len(paths) > 1): time.sleep(5) else: logger.log((u'XBMC: Doing Full Library update via HTTP method for host: ' + host), logger.MESSAGE) updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video)'} request = self._send_to_xbmc(updateCommand, host) if (not request): logger.log((u'XBMC: Full Library update failed on: ' + host), logger.ERROR) return False return True
'Handles communication to XBMC servers via JSON-RPC. Args: command: Dictionary of field/data pairs, encoded via urllib and passed to the XBMC JSON-RPC via HTTP; host: XBMC webserver host:port; username: XBMC webserver username; password: XBMC webserver password. Returns: response.result for successful commands, or False if there was an error'
def _send_to_xbmc_json(self, command, host=None, username=None, password=None):
if (not username): username = sickbeard.XBMC_USERNAME if (not password): password = sickbeard.XBMC_PASSWORD if (not host): logger.log(u'XBMC: No host specified, check your settings', logger.DEBUG) return False command = command.encode('utf-8') logger.log((u'XBMC: JSON command: ' + command), logger.DEBUG) url = ('http://%s/jsonrpc' % host) try: req = urllib2.Request(url, command) req.add_header('Content-type', 'application/json') if password: pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() pw_mgr.add_password(None, url, username, password) else: pw_mgr = None response = sickbeard.helpers.getURL(req, password_mgr=pw_mgr, throw_exc=True) result = json.loads(response) logger.log((u'XBMC: JSON response: ' + str(result)), logger.DEBUG) return result except ValueError as e: logger.log((u'XBMC: Unable to decode JSON: ' + response), logger.WARNING) except urllib2.URLError as e: logger.log((((u'XBMC: Error while trying to retrieve XBMC API version for ' + host) + ': ') + ex(e)), logger.WARNING) except IOError as e: logger.log((((u'XBMC: Could not contact XBMC JSON API at ' + fixStupidEncodings(url)) + ' ') + ex(e)), logger.WARNING) except Exception as e: logger.log((((u'XBMC: Exception occurred while trying to access ' + fixStupidEncodings(url)) + ' ') + ex(e)), logger.WARNING) return False
'Handles updating an XBMC host via HTTP JSON-RPC. Attempts to update the XBMC video library for a specific tv show if passed, otherwise updates the whole library if enabled. Args: host: XBMC webserver host:port; showName: Name of a TV show to specifically target the library update for. Returns: True or False'
def _update_library_json(self, host=None, showName=None):
if (not host): logger.log(u'XBMC: No host specified, check your settings', logger.DEBUG) return False if showName: tvshowid = (-1) logger.log((u'XBMC: Updating library via JSON method for show ' + showName), logger.MESSAGE) showsCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","id":1}' showsResponse = self._send_to_xbmc_json(showsCommand, host) if (showsResponse and ('result' in showsResponse) and ('tvshows' in showsResponse['result'])): shows = showsResponse['result']['tvshows'] else: logger.log(u'XBMC: No tvshows in XBMC TV show list', logger.DEBUG) return False for show in shows: if (show['label'] == showName): tvshowid = show['tvshowid'] break del shows if (tvshowid == (-1)): logger.log(u'XBMC: Exact show name not matched in XBMC TV show list', logger.DEBUG) return False pathCommand = ('{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % tvshowid) pathResponse = self._send_to_xbmc_json(pathCommand, host) path = pathResponse['result']['tvshowdetails']['file'] logger.log((((((u'XBMC: Received Show: ' + show['label']) + ' with ID: ') + str(tvshowid)) + ' Path: ') + path), logger.DEBUG) if (len(path) < 1): logger.log((((((u'XBMC: No valid path found for ' + showName) + ' with ID: ') + str(tvshowid)) + ' on ') + host), logger.WARNING) return False logger.log((((((u'XBMC: Updating ' + showName) + ' on ') + host) + ' at ') + path), logger.MESSAGE) updateCommand = ('{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % json.dumps(path)) request = self._send_to_xbmc_json(updateCommand, host) if (not request): logger.log((((((u'XBMC: Update of show directory failed on ' + showName) + ' on ') + host) + ' at ') + path), logger.WARNING) return False for r in request: if ('error' in r): logger.log((((((u'XBMC: Error while attempting to update show directory for ' + showName) + ' on ') + host) + ' at ') + path), logger.ERROR) return False else: logger.log((u'XBMC: Doing Full Library update via JSON method for host: ' + host), logger.MESSAGE) updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}' request = self._send_to_xbmc_json(updateCommand, host, sickbeard.XBMC_USERNAME, sickbeard.XBMC_PASSWORD) if (not request): logger.log((u'XBMC: Full Library update failed on: ' + host), logger.ERROR) return False return True
'Public wrapper for the update library functions. Checks the XBMC API version to branch the logic to call either the legacy HTTP API or the newer JSON-RPC over HTTP method. Although a comma-delimited list of hosts is accepted, only the first host is sent the update when the "update first host only" option is set; this is a workaround for SQL backend users, as updating multiple clients causes duplicate entries. Future plan is to revisit how we store the host/ip/username/pw/options so that it may be more flexible. Args: ep_obj/show_obj: Optional TVEpisode or TVShow object used to derive the show name to target the library update for Returns: Returns True or False'
def update_library(self, ep_obj=None, show_obj=None):
if ep_obj: showName = ep_obj.show.name elif show_obj: showName = show_obj.name else: showName = None if (sickbeard.USE_XBMC and sickbeard.XBMC_UPDATE_LIBRARY): if (not sickbeard.XBMC_HOST): logger.log(u'XBMC: No host specified, check your settings', logger.DEBUG) return False if sickbeard.XBMC_UPDATE_ONLYFIRST: host = sickbeard.XBMC_HOST.split(',')[0].strip() else: host = sickbeard.XBMC_HOST result = 0 for curHost in [x.strip() for x in host.split(',')]: logger.log(((u"XBMC: Sending request to update library for host: '" + curHost) + "'"), logger.MESSAGE) xbmcapi = self._get_xbmc_version(curHost, sickbeard.XBMC_USERNAME, sickbeard.XBMC_PASSWORD) if xbmcapi: if (xbmcapi <= 4): if (not self._update_library(curHost, showName)): if (showName and sickbeard.XBMC_UPDATE_FULL): self._update_library(curHost) elif (not self._update_library_json(curHost, showName)): if (showName and sickbeard.XBMC_UPDATE_FULL): self._update_library_json(curHost) else: if sickbeard.XBMC_ALWAYS_ON: logger.log(((u"XBMC: Failed to detect XBMC version for '" + curHost) + "', check configuration and try again."), logger.ERROR) result = (result + 1) if (result == 0): return True else: return False
'Handles communication to Plex hosts via HTTP API Args: command: Dictionary of field/data pairs, encoded via urllib and passed to the legacy xbmcCmds HTTP API host: Plex host:port username: Plex API username password: Plex API password Returns: Returns \'OK\' for successful commands or False if there was an error'
def _send_to_plex(self, command, host, username=None, password=None):
if (not username): username = sickbeard.PLEX_USERNAME if (not password): password = sickbeard.PLEX_PASSWORD if (not host): logger.log(u'PLEX: No host specified, check your settings', logger.ERROR) return False for key in command: if (type(command[key]) == unicode): command[key] = command[key].encode('utf-8') enc_command = urllib.urlencode(command) logger.log((u'PLEX: Encoded API command: ' + enc_command), logger.DEBUG) url = ('http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)) try: req = urllib2.Request(url) if password: pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() pw_mgr.add_password(None, url, username, password) else: pw_mgr = None result = sickbeard.helpers.getURL(req, password_mgr=pw_mgr) logger.log((u'PLEX: HTTP response: ' + result.replace('\n', '')), logger.DEBUG) return 'OK' except (urllib2.URLError, IOError) as e: logger.log((((u"PLEX: Warning: Couldn't contact Plex at " + fixStupidEncodings(url)) + ' ') + ex(e)), logger.WARNING) return False
'Internal wrapper for the notify_snatch and notify_download functions Args: message: Message body of the notice to send title: Title of the notice to send host: Plex Media Client(s) host:port username: Plex username password: Plex password force: Used for the Test method to override config safety checks Returns: Returns a list of results in the format of host:ip:result The result will either be \'OK\' or False, and is parsed by the calling function.'
def _notify(self, message, title='Sick Beard', host=None, username=None, password=None, force=False):
if ((not sickbeard.USE_PLEX) and (not force)): return False if (not host): host = sickbeard.PLEX_HOST if (not username): username = sickbeard.PLEX_USERNAME if (not password): password = sickbeard.PLEX_PASSWORD result = '' for curHost in [x.strip() for x in host.split(',')]: logger.log((((u"PLEX: Sending notification to '" + curHost) + "' - ") + message), logger.MESSAGE) command = {'command': 'ExecBuiltIn', 'parameter': (((('Notification(' + title.encode('utf-8')) + ',') + message.encode('utf-8')) + ')')} notifyResult = self._send_to_plex(command, curHost, username, password) if notifyResult: result += ((curHost + ':') + str(notifyResult)) return result
'Handles updating the Plex Media Server host via HTTP API. Plex Media Server currently only supports updating the whole video library and not a specific path. Returns: Returns True or False'
def update_library(self, ep_obj=None, host=None, username=None, password=None):
if (not host): host = sickbeard.PLEX_SERVER_HOST if (not username): username = sickbeard.PLEX_USERNAME if (not password): password = sickbeard.PLEX_PASSWORD if (sickbeard.USE_PLEX and sickbeard.PLEX_UPDATE_LIBRARY): if (not sickbeard.PLEX_SERVER_HOST): logger.log(u'PLEX: No Plex Media Server host specified, check your settings', logger.DEBUG) return False logger.log((u'PLEX: Updating library for the Plex Media Server host: ' + host), logger.MESSAGE) token_arg = '' if (username and password): logger.log((u'PLEX: fetching credentials for Plex user: ' + username), logger.DEBUG) url = 'https://plex.tv/users/sign_in.xml' req = urllib2.Request(url, data='') pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() pw_mgr.add_password(None, url, username, password) req.add_header('X-Plex-Product', 'Sick Beard Notifier') req.add_header('X-Plex-Client-Identifier', '5f48c063eaf379a565ff56c9bb2b401e') req.add_header('X-Plex-Version', '1.0') try: response = sickbeard.helpers.getURL(req, throw_exc=True) auth_tree = etree.fromstring(response) token = auth_tree.findall('.//authentication-token')[0].text token_arg = ('?X-Plex-Token=' + token) except urllib2.URLError as e: logger.log((u'PLEX: Error fetching credentials from from plex.tv for user %s: %s' % (username, ex(e))), logger.MESSAGE) except (ValueError, IndexError) as e: logger.log((u'PLEX: Error parsing plex.tv response: ' + ex(e)), logger.MESSAGE) url = ('http://%s/library/sections%s' % (sickbeard.PLEX_SERVER_HOST, token_arg)) try: xml_tree = etree.fromstring(sickbeard.helpers.getURL(url)) media_container = xml_tree.getroot() except IOError as e: logger.log((u'PLEX: Error while trying to contact Plex Media Server: ' + ex(e)), logger.ERROR) return False sections = media_container.findall('.//Directory') if (not sections): logger.log((u'PLEX: Plex Media Server not running on: ' + sickbeard.PLEX_SERVER_HOST), logger.MESSAGE) return False for section in sections: if (section.attrib['type'] == 'show'): url = ('http://%s/library/sections/%s/refresh%s' % (sickbeard.PLEX_SERVER_HOST, section.attrib['key'], token_arg)) if (sickbeard.helpers.getURLFileLike(url) is None): logger.log(u'PLEX: Error updating library section for Plex Media Server', logger.ERROR) return False return True
'Retrieves the settings from an NMJ/Popcorn Hour device host: The hostname/IP of the Popcorn Hour server Returns: True if the settings were retrieved successfully, False otherwise'
def notify_settings(self, host):
terminal = False try: terminal = telnetlib.Telnet(host) except Exception: logger.log((u'NMJ: Unable to get a telnet session to %s' % host), logger.WARNING) return False logger.log((u'NMJ: Connected to %s via telnet' % host), logger.DEBUG) terminal.read_until('sh-3.00# ') terminal.write('cat /tmp/source\n') terminal.write('cat /tmp/netshare\n') terminal.write('exit\n') tnoutput = terminal.read_all() database = '' device = '' match = re.search('(.+\\.db)\\r\\n?(.+)(?=sh-3.00# cat /tmp/netshare)', tnoutput) if match: database = match.group(1) device = match.group(2) logger.log((u'NMJ: Found NMJ database %s on device %s' % (database, device)), logger.DEBUG) sickbeard.NMJ_DATABASE = database else: logger.log((u'NMJ: Could not get current NMJ database on %s, NMJ is probably not running!' % host), logger.WARNING) return False if device.startswith('NETWORK_SHARE/'): match = re.search(('.*(?=\r\n?%s)' % re.escape(device[14:])), tnoutput) if match: mount = match.group().replace('127.0.0.1', host) logger.log((u'NMJ: Found mounting url on the Popcorn Hour in configuration: %s' % mount), logger.DEBUG) sickbeard.NMJ_MOUNT = mount else: logger.log(u'NMJ: Detected a network share on the Popcorn Hour, but could not get the mounting url', logger.WARNING) return False return True
'Sends an NMJ update command to the specified machine host: The hostname/IP to send the request to (no port) database: The database to send the request to mount: The mount URL to use (optional) Returns: True if the request succeeded, False otherwise'
def _sendNMJ(self, host, database, mount=None):
if mount: try: req = urllib2.Request(mount) logger.log((u'NMJ: Try to mount network drive via url: %s' % mount), logger.DEBUG) sickbeard.helpers.getURLFileLike(req) except IOError as e: if hasattr(e, 'reason'): logger.log((u'NMJ: Could not contact Popcorn Hour on host %s: %s' % (host, e.reason)), logger.WARNING) elif hasattr(e, 'code'): logger.log((u'NMJ: Problem with Popcorn Hour on host %s: %s' % (host, e.code)), logger.WARNING) return False except Exception as e: logger.log((u'NMJ: Unknown exception: ' + ex(e)), logger.ERROR) return False UPDATE_URL = 'http://%(host)s:8008/metadata_database?%(params)s' params = {'arg0': 'scanner_start', 'arg1': database, 'arg2': 'background', 'arg3': ''} params = urllib.urlencode(params) updateUrl = (UPDATE_URL % {'host': host, 'params': params}) try: req = urllib2.Request(updateUrl) logger.log((u'NMJ: Sending NMJ scan update command via url: %s' % updateUrl), logger.DEBUG) response = sickbeard.helpers.getURL(req) except IOError as e: if hasattr(e, 'reason'): logger.log((u'NMJ: Could not contact Popcorn Hour on host %s: %s' % (host, e.reason)), logger.WARNING) elif hasattr(e, 'code'): logger.log((u'NMJ: Problem with Popcorn Hour on host %s: %s' % (host, e.code)), logger.WARNING) return False except Exception as e: logger.log((u'NMJ: Unknown exception: ' + ex(e)), logger.ERROR) return False try: et = etree.fromstring(response) result = et.findtext('returnValue') except SyntaxError as e: logger.log((u'NMJ: Unable to parse XML returned from the Popcorn Hour: %s' % e), logger.ERROR) return False if (int(result) > 0): logger.log((u'NMJ: Popcorn Hour returned an errorcode: %s' % result), logger.ERROR) return False else: logger.log(u'NMJ: Started background scan.', logger.MESSAGE) return True
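The scan command assembled above has roughly this shape; a small sketch with placeholder host and database values (parameter order in the query string may vary):
import urllib
params = urllib.urlencode({'arg0': 'scanner_start',
                           'arg1': '/share/Video/nmj_database/media.db',  # placeholder database path
                           'arg2': 'background', 'arg3': ''})
update_url = 'http://%(host)s:8008/metadata_database?%(params)s' % {
    'host': '192.168.1.100', 'params': params}
# e.g. http://192.168.1.100:8008/metadata_database?arg0=scanner_start&arg1=...&arg2=background&arg3=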
'Sends an NMJ update command based on the SB config settings host: The host to send the command to (optional, defaults to the host in the config) database: The database to use (optional, defaults to the database in the config) mount: The mount URL (optional, defaults to the mount URL in the config) force: If True then the notification will be sent even if NMJ is disabled in the config'
def _notifyNMJ(self, host=None, database=None, mount=None, force=False):
if ((not sickbeard.USE_NMJ) and (not force)): return False if (not host): host = sickbeard.NMJ_HOST if (not database): database = sickbeard.NMJ_DATABASE if (not mount): mount = sickbeard.NMJ_MOUNT logger.log(u'NMJ: Sending scan command.', logger.DEBUG) return self._sendNMJ(host, database, mount)
'Sends a Boxcar2 notification to the access token provided title: The title of the message msg: The message to send (unicode) accessToken: The access token to send the notification to sound: The notification sound to use returns: True if the message succeeded, False otherwise'
def _sendBoxcar2(self, title, msg, accessToken, sound):
msg = msg.strip().encode('utf-8') data = urllib.urlencode({'user_credentials': accessToken, 'notification[title]': ((title + ' - ') + msg), 'notification[long_message]': msg, 'notification[sound]': sound, 'notification[source_name]': 'SickBeard'}) try: req = urllib2.Request(API_URL, data) handle = sickbeard.helpers.getURLFileLike(req, throw_exc=True) handle.close() except urllib2.URLError as e: if (hasattr(e, 'code') and (e.code == 201)): logger.log(u'BOXCAR2: Notification successful.', logger.MESSAGE) return True if (not hasattr(e, 'code')): logger.log((u'BOXCAR2: Notification failed.' + ex(e)), logger.ERROR) else: logger.log((u'BOXCAR2: Notification failed. Error code: ' + str(e.code)), logger.ERROR) if (e.code == 404): logger.log(u'BOXCAR2: Access token is wrong/not associated to a device.', logger.ERROR) elif (e.code == 401): logger.log(u'BOXCAR2: Access token not recognized.', logger.ERROR) elif (e.code == 400): logger.log(u'BOXCAR2: Wrong data sent to boxcar.', logger.ERROR) elif (e.code == 503): logger.log(u'BOXCAR2: Boxcar server to busy to handle the request at this time.', logger.WARNING) return False logger.log(u'BOXCAR2: Notification successful.', logger.MESSAGE) return True
'Sends a boxcar2 notification based on the provided info or SB config title: The title of the notification to send message: The message string to send accessToken: The access token to send the notification to (optional, defaults to the access token in the config) force: If True then the notification will be sent even if Boxcar is disabled in the config'
def _notify(self, title, message, accessToken=None, sound=None, force=False):
if ((not sickbeard.USE_BOXCAR2) and (not force)): return False if (not accessToken): accessToken = sickbeard.BOXCAR2_ACCESS_TOKEN if (not sound): sound = sickbeard.BOXCAR2_SOUND logger.log((u'BOXCAR2: Sending notification for ' + message), logger.DEBUG) return self._sendBoxcar2(title, message, accessToken, sound)
'Sends a pushbullet notification based on the provided info or SB config title: The title of the notification to send body: The body string to send accessToken: The access token to grant access device_iden: The iden of a specific target, if none provided send to all devices force: If True then the notification will be sent even if Pushbullet is disabled in the config'
def _notify(self, title, body, accessToken=None, device_iden=None, force=False):
if ((not sickbeard.USE_PUSHBULLET) and (not force)): return False if (not accessToken): accessToken = sickbeard.PUSHBULLET_ACCESS_TOKEN if (not device_iden): device_iden = sickbeard.PUSHBULLET_DEVICE_IDEN logger.log((u'PUSHBULLET: Sending notice with details: title="%s", body="%s", device_iden="%s"' % (title, body, device_iden)), logger.DEBUG) return self._sendPushbullet(title, body, accessToken, device_iden)
'A generic method for communicating with trakt. Uses the method and data provided along with the auth info to send the command. method: The URL to use at trakt, relative, no leading slash. api: The API string to provide to trakt username: The username to use when logging in password: The unencrypted password to use when logging in Returns: A boolean representing success'
def _notifyTrakt(self, method, api, username, password, data={}, force=False):
if ((not sickbeard.USE_TRAKT) and (not force)): return False logger.log((u'TRAKT: Calling method ' + method), logger.DEBUG) if (not api): api = sickbeard.TRAKT_API if (not username): username = sickbeard.TRAKT_USERNAME if (not password): password = sickbeard.TRAKT_PASSWORD password = sha1(password).hexdigest() method += api data['username'] = username data['password'] = password encoded_data = json.dumps(data) logger.log((((u'TRAKT: Calling method http://api.trakt.tv/' + method) + ', with data') + encoded_data), logger.DEBUG) req = urllib2.Request(('http://api.trakt.tv/' + method), encoded_data) urlResp = sickbeard.helpers.getURL(req) if urlResp: resp = json.loads(urlResp) else: return False if ('error' in resp): raise Exception(resp['error']) if (resp['status'] == 'success'): logger.log((u'TRAKT: Succeeded calling method. Result: ' + resp['message']), logger.MESSAGE) return True logger.log(u'TRAKT: Failed calling method', logger.ERROR) return False
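A rough sketch of the payload shape assembled above (username and password are placeholders; the password travels as its SHA1 hex digest, and the API key is appended to the method path, not the body):
import json
from hashlib import sha1

data = {'episodes': [{'season': 3, 'episode': 7}]}       # whatever the caller passed in
data['username'] = 'someuser'                            # placeholder
data['password'] = sha1('not-a-real-password').hexdigest()
encoded_data = json.dumps(data)
# POSTed to http://api.trakt.tv/<method><api key>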
'Sends a test notification to trakt with the given authentication info and returns a boolean representing success. api: The api string to use username: The username to use password: The password to use Returns: True if the request succeeded, False otherwise'
def test_notify(self, api, username, password):
method = 'account/test/' return self._notifyTrakt(method, api, username, password, {}, force=True)
'Sends a request to trakt indicating that the given episode is part of our library. ep_obj: The TVEpisode object to add to trakt'
def update_library(self, ep_obj=None):
if sickbeard.USE_TRAKT: method = 'show/episode/library/' data = {'tvdb_id': ep_obj.show.tvdbid, 'title': ep_obj.show.name, 'year': ep_obj.show.startyear, 'episodes': [{'season': ep_obj.season, 'episode': ep_obj.episode}]} if data: self._notifyTrakt(method, None, None, None, data)
'Sends a pushover notification based on the provided info or SB config'
def _notify(self, title, message, userKey=None, priority=None, device=None, sound=None, force=False):
if ((not sickbeard.USE_PUSHOVER) and (not force)): return False if (not userKey): userKey = sickbeard.PUSHOVER_USERKEY if (not priority): priority = sickbeard.PUSHOVER_PRIORITY if (not device): device = sickbeard.PUSHOVER_DEVICE if (not sound): sound = sickbeard.PUSHOVER_SOUND logger.log((u'PUSHOVER: Sending notice with details: title="%s", message="%s", userkey=%s, priority=%s, device=%s, sound=%s' % (title, message, userKey, priority, device, sound)), logger.DEBUG) return self._sendPushover(title, message, userKey, priority, device, sound)
'Retrieves the NMJv2 database location from the Popcorn Hour host: The hostname/IP of the Popcorn Hour server dbloc: \'local\' for the PCH internal hard drive, \'network\' for PCH network shares instance: Allows for selection of a different DB in case of multiple databases Returns: True if the settings were retrieved successfully, False otherwise'
def notify_settings(self, host, dbloc, instance):
try: url_loc = (((('http://' + host) + ':8008/file_operation?arg0=list_user_storage_file&arg1=&arg2=') + instance) + '&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false') req = urllib2.Request(url_loc) response1 = sickbeard.helpers.getURL(req) xml = parseString(response1) time.sleep(0.5) for node in xml.getElementsByTagName('path'): xmlTag = node.toxml() xmlData = xmlTag.replace('<path>', '').replace('</path>', '').replace('[=]', '') url_db = ((('http://' + host) + ':8008/metadata_database?arg0=check_database&arg1=') + xmlData) reqdb = urllib2.Request(url_db) responsedb = sickbeard.helpers.getURL(reqdb) xmldb = parseString(responsedb) returnvalue = xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>', '').replace('</returnValue>', '') if (returnvalue == '0'): DB_path = xmldb.getElementsByTagName('database_path')[0].toxml().replace('<database_path>', '').replace('</database_path>', '').replace('[=]', '') if ((dbloc == 'local') and (DB_path.find('localhost') > (-1))): sickbeard.NMJv2_HOST = host sickbeard.NMJv2_DATABASE = DB_path return True if ((dbloc == 'network') and (DB_path.find('://') > (-1))): sickbeard.NMJv2_HOST = host sickbeard.NMJv2_DATABASE = DB_path return True except IOError as e: logger.log((u'NMJv2: Could not contact Popcorn Hour on host %s: %s' % (host, e)), logger.WARNING) return False return False
'Sends an NMJv2 update command to the specified machine host: The hostname/IP to send the request to (no port) The database and scan directory are taken from the NMJv2 config settings Returns: True if the request succeeded, False otherwise'
def _sendNMJ(self, host):
try: url_scandir = (((('http://' + host) + ':8008/metadata_database?arg0=update_scandir&arg1=') + sickbeard.NMJv2_DATABASE) + '&arg2=&arg3=update_all') logger.log((u'NMJv2: Scan update command send to host: %s' % host), logger.DEBUG) url_updatedb = (((('http://' + host) + ':8008/metadata_database?arg0=scanner_start&arg1=') + sickbeard.NMJv2_DATABASE) + '&arg2=background&arg3=') logger.log((u'NMJv2: Try to mount network drive via url: %s' % host), logger.DEBUG) prereq = urllib2.Request(url_scandir) req = urllib2.Request(url_updatedb) response1 = sickbeard.helpers.getURL(prereq) time.sleep(0.5) response2 = sickbeard.helpers.getURL(req) except IOError as e: logger.log((u'NMJv2: Could not contact Popcorn Hour on host %s: %s' % (host, e)), logger.WARNING) return False try: et = etree.fromstring(response1) result1 = et.findtext('returnValue') except SyntaxError as e: logger.log((u'NMJv2: Unable to parse XML returned from the Popcorn Hour: update_scandir, %s' % e), logger.ERROR) return False try: et = etree.fromstring(response2) result2 = et.findtext('returnValue') except SyntaxError as e: logger.log((u'NMJv2: Unable to parse XML returned from the Popcorn Hour: scanner_start, %s' % e), logger.ERROR) return False error_codes = ['8', '11', '22', '49', '50', '51', '60'] error_messages = ['Invalid parameter(s)/argument(s)', 'Invalid database path', 'Insufficient size', 'Database write error', 'Database read error', 'Open fifo pipe failed', 'Read only file system'] if (int(result1) > 0): index = error_codes.index(result1) logger.log((u'NMJv2: Popcorn Hour returned an error: %s' % error_messages[index]), logger.ERROR) return False elif (int(result2) > 0): index = error_codes.index(result2) logger.log((u'NMJv2: Popcorn Hour returned an error: %s' % error_messages[index]), logger.ERROR) return False else: logger.log(u'NMJv2: Started background scan.', logger.MESSAGE) return True
'Sends an NMJv2 update command based on the SB config settings host: The host to send the command to (optional, defaults to the host in the config) force: If True then the notification will be sent even if NMJv2 is disabled in the config'
def _notifyNMJ(self, host=None, force=False):
if ((not sickbeard.USE_NMJv2) and (not force)): return False if (not host): host = sickbeard.NMJv2_HOST logger.log(u'NMJv2: Sending scan command.', logger.DEBUG) return self._sendNMJ(host)
'Returns a tuple containing (status, quality)'
@staticmethod def splitCompositeStatus(status):
if (status == UNKNOWN): return (UNKNOWN, Quality.UNKNOWN) for x in sorted(Quality.qualityStrings.keys(), reverse=True): if (status > (x * 100)): return ((status - (x * 100)), x) return (status, Quality.NONE)
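The composite encoding behind this split is status + quality * 100; a small self-contained sketch of the arithmetic (the quality flag values below are made up, not the real Quality constants):
def composite_status(status, quality):
    # pack a base status with a quality flag
    return status + quality * 100

def split_composite_status(composite, quality_flags):
    # check known quality values highest first, exactly like the method above
    for q in sorted(quality_flags, reverse=True):
        if composite > q * 100:
            return composite - q * 100, q
    return composite, 0

flags = [1, 2, 4, 8, 16, 32]            # hypothetical quality bit values
packed = composite_status(4, 8)         # hypothetical status 4, quality 8
assert packed == 804
assert split_composite_status(packed, flags) == (4, 8)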
'This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER'
def isEnabled(self):
return False
'Returns a result of the correct type for this provider'
def getResult(self, episodes):
if (self.providerType == GenericProvider.NZB): result = classes.NZBSearchResult(episodes) elif (self.providerType == GenericProvider.TORRENT): result = classes.TorrentSearchResult(episodes) else: result = classes.SearchResult(episodes) result.provider = self return result
'By default this is just a simple urlopen call but this method should be overridden for providers with special URL requirements (like cookies)'
def getURL(self, url, post_data=None, heads=None):
if post_data: if heads: req = urllib2.Request(url, post_data, heads) else: req = urllib2.Request(url, post_data) elif heads: req = urllib2.Request(url, headers=heads) else: req = urllib2.Request(url) response = helpers.getURL(req) if (response is None): logger.log((((u'Error loading ' + self.name) + ' URL: ') + url), logger.ERROR) return response
'Save the result to disk.'
def downloadResult(self, result):
logger.log((((u'Downloading a result from ' + self.name) + ' at ') + result.url)) data = self.getURL(result.url) if (not data): return False if (self.providerType == GenericProvider.NZB): saveDir = sickbeard.NZB_DIR writeMode = 'w' elif (self.providerType == GenericProvider.TORRENT): saveDir = sickbeard.TORRENT_DIR writeMode = 'wb' else: return False file_name = ek.ek(os.path.join, saveDir, ((helpers.sanitizeFileName(result.name) + '.') + self.providerType)) logger.log((u'Saving to ' + file_name), logger.DEBUG) try: with open(file_name, writeMode) as fileOut: fileOut.write(data) helpers.chmodAsParent(file_name) except EnvironmentError as e: logger.log((u'Unable to save the file: ' + ex(e)), logger.ERROR) return False return self._verify_download(file_name)
'Checks the saved file to see if it is actually valid; if not, the download is considered a failure.'
def _verify_download(self, file_name=None):
if (self.providerType == GenericProvider.TORRENT): parser = createParser(file_name) if parser: mime_type = parser._getMimeType() try: parser.stream._input.close() except: pass if (mime_type != 'application/x-bittorrent'): logger.log(u'Result is not a valid torrent file', logger.WARNING) return False return True
'Figures out the quality of the given RSS item node item: An elementtree.ElementTree element representing the <item> tag of the RSS feed Returns a Quality value obtained from the node\'s data'
def getQuality(self, item):
(title, url) = self._get_title_and_url(item) quality = Quality.nameQuality(title) return quality
'Retrieves the title and URL data from the item XML node item: An elementtree.ElementTree element representing the <item> tag of the RSS feed Returns: A tuple containing two strings representing title and URL respectively'
def _get_title_and_url(self, item):
title = helpers.get_xml_text(item.find('title')) if title: title = title.replace(' ', '.') url = helpers.get_xml_text(item.find('link')) if url: url = url.replace('&amp;', '&') return (title, url)
'Add a regular notification to the queue title: The title of the notification message: The message portion of the notification'
def message(self, title, message=''):
self._messages.append(Notification(title, message, MESSAGE))
'Add an error notification to the queue title: The title of the notification message: The message portion of the notification'
def error(self, title, message=''):
self._errors.append(Notification(title, message, ERROR))
'Return all the available notifications in a list. Marks them all as seen as it returns them. Also removes timed out Notifications from the queue. Returns: A list of Notification objects'
def get_notifications(self):
self._errors = [x for x in self._errors if (not x.is_expired())] self._messages = [x for x in self._messages if (not x.is_expired())] return [x.see() for x in (self._errors + self._messages) if x.is_new()]
'Returns True if the notification hasn\'t been displayed to the current client (aka IP address).'
def is_new(self):
return (cherrypy.request.remote.ip not in self._seen)
'Returns True if the notification is older than the specified timeout value.'
def is_expired(self):
return ((datetime.datetime.now() - self._when) > self._timeout)
'Returns this notification object and marks it as seen by the client ip'
def see(self):
self._seen.append(cherrypy.request.remote.ip) return self
'Returns the show name if there is a show object created, if not returns the dir that the show is being added to.'
def _getName(self):
if (self.show is None): return self.showDir return self.show.name
'Returns True if we\'ve gotten far enough to have a show object, or False if we still only know the folder name.'
def _isLoading(self):
if (self.show is None): return True return False
'Cleans up series name by removing any . and _ characters, along with any trailing hyphens. Is basically equivalent to replacing all _ and . with a space, but handles decimal numbers in string, for example: >>> cleanRegexedSeriesName("an.example.1.0.test") \'an example 1.0 test\' >>> cleanRegexedSeriesName("an_example_1.0_test") \'an example 1.0 test\' Stolen from dbr\'s tvnamer'
def clean_series_name(self, series_name):
series_name = re.sub('(\\D)\\.(?!\\s)(\\D)', '\\1 \\2', series_name) series_name = re.sub('(\\d)\\.(\\d{4})', '\\1 \\2', series_name) series_name = re.sub('(\\D)\\.(?!\\s)', '\\1 ', series_name) series_name = re.sub('\\.(?!\\s)(\\D)', ' \\1', series_name) series_name = series_name.replace('_', ' ') series_name = re.sub('-$', '', series_name) return series_name.strip()
'Convert number into an integer. Try force converting into integer first, on error try converting from Roman numerals. Args: number: int or representation of a number: string or unicode Returns: integer: int number Raises: ValueError'
def _convert_number(self, number):
try: integer = int(number) except: roman_numeral_map = (('M', 1000, 3), ('CM', 900, 1), ('D', 500, 1), ('CD', 400, 1), ('C', 100, 3), ('XC', 90, 1), ('L', 50, 1), ('XL', 40, 1), ('X', 10, 3), ('IX', 9, 1), ('V', 5, 1), ('IV', 4, 1), ('I', 1, 3)) roman_numeral = str(number).upper() integer = 0 index = 0 for (numeral, value, max_count) in roman_numeral_map: count = 0 while (roman_numeral[index:(index + len(numeral))] == numeral): count += 1 if (count > max_count): raise ValueError('not a roman numeral') integer += value index += len(numeral) if (index < len(roman_numeral)): raise ValueError('not a roman numeral') return integer
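A standalone, runnable restatement of the conversion described above — try int() first, then fall back to parsing Roman numerals with per-symbol repeat limits:
def convert_number(number):
    try:
        return int(number)
    except (TypeError, ValueError):
        pass
    numeral_map = (('M', 1000, 3), ('CM', 900, 1), ('D', 500, 1), ('CD', 400, 1),
                   ('C', 100, 3), ('XC', 90, 1), ('L', 50, 1), ('XL', 40, 1),
                   ('X', 10, 3), ('IX', 9, 1), ('V', 5, 1), ('IV', 4, 1), ('I', 1, 3))
    roman, total, index = str(number).upper(), 0, 0
    for symbol, value, max_count in numeral_map:
        count = 0
        while roman[index:index + len(symbol)] == symbol:
            count += 1
            if count > max_count:
                raise ValueError('not a roman numeral')
            total += value
            index += len(symbol)
    if index < len(roman):
        raise ValueError('not a roman numeral')
    return total

assert convert_number('14') == 14
assert convert_number('XIV') == 14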
'Access the API at the path given and with the optional params given. path: A list of the path elements to use (eg. [\'repos\', \'midgetspy\', \'Sick-Beard\', \'commits\']) params: Optional dict of name/value pairs for extra params to send. (eg. {\'per_page\': 10}) Returns a deserialized json object of the result. Doesn\'t do any error checking (hope it works).'
def _access_API(self, path, params=None):
url = ('https://api.github.com/' + '/'.join(path)) if (params and (type(params) is dict)): url += ('?' + '&'.join([((str(x) + '=') + str(params[x])) for x in params.keys()])) data = helpers.getURL(url) if data: json_data = json.loads(data) return json_data else: return []
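For illustration, the URL this helper builds for the example in the docstring looks like this (not actually requested here):
path = ['repos', 'midgetspy', 'Sick-Beard', 'commits']
params = {'per_page': 10}
url = 'https://api.github.com/' + '/'.join(path)
if params:
    url += '?' + '&'.join(str(k) + '=' + str(params[k]) for k in params)
# -> https://api.github.com/repos/midgetspy/Sick-Beard/commits?per_page=10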
'Uses the API to get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD. user: The github username of the person whose repo you\'re querying repo: The repo name to query branch: Optional, the branch name to show commits from Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/'
def commits(self):
access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'], params={'per_page': 100, 'sha': self.branch}) return access_API
'Uses the API to get a list of compares between base and head. user: The github username of the person whose repo you\'re querying repo: The repo name to query base: Start compare from branch head: Current commit sha or branch name to compare per_page: number of items per page Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/'
def compare(self, base, head, per_page=1):
access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'compare', ((base + '...') + head)], params={'per_page': per_page}) return access_API
'Update bitwise flags to reflect new quality values. Check flag bits (clear the old ones, then set their new locations) starting with the highest bits so we don\'t overwrite data we need later on'
def _update_quality(self, old_quality):
result = old_quality if (result & (1 << 5)): result = (result & (~ (1 << 5))) result = (result | (1 << 8)) if (result & (1 << 4)): result = (result & (~ (1 << 4))) result = (result | (1 << 7)) if (result & (1 << 3)): result = (result & (~ (1 << 3))) result = (result | (1 << 5)) return result
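A worked example of the remapping, written as a loop over (old bit, new bit) pairs in the same highest-first order so a freshly set bit is never moved a second time:
def update_quality(old_quality):
    result = old_quality
    # old flag bits 5, 4 and 3 move to bits 8, 7 and 5 respectively
    for old_bit, new_bit in ((5, 8), (4, 7), (3, 5)):
        if result & (1 << old_bit):
            result &= ~(1 << old_bit)
            result |= (1 << new_bit)
    return result

assert update_quality(1 << 3) == 1 << 5                         # 8  -> 32
assert update_quality(1 << 5) == 1 << 8                         # 32 -> 256
assert update_quality((1 << 5) | (1 << 3)) == (1 << 8) | (1 << 5)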
'Unpack, update and return new quality values. Unpack the composite archive/initial values, update either quality if needed, then return the new composite quality value.'
def _update_composite_qualities(self, status):
best = ((status & (65535 << 16)) >> 16) initial = (status & 65535) best = self._update_quality(best) initial = self._update_quality(initial) result = ((best << 16) | initial) return result
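The packing convention assumed here: the initial quality mask lives in the low 16 bits and the best/archive mask in the high 16 bits, so unpacking and repacking is plain shifting and masking (flag values below are illustrative):
initial = 1 << 3          # illustrative old-format flag
best = 1 << 5
status = (best << 16) | initial
unpacked_best = (status & (0xFFFF << 16)) >> 16
unpacked_initial = status & 0xFFFF
assert (unpacked_best, unpacked_initial) == (best, initial)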
'Configure a file handler to log at file_name and return it.'
def _config_handler(self):
file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8') file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S')) return file_handler
'Returns a numbered log file name depending on i. If i==0 it just uses logName, if not it appends i to the extension (blah.log.3 for i == 3) i: Log number to use'
def _log_file_name(self, i):
return (self.log_file_path + (('.' + str(i)) if i else ''))
'Scans the log folder and figures out how many log files there are already on disk Returns: The number of the last used file (eg. mylog.log.3 would return 3). If there are no logs it returns -1'
def _num_logs(self):
cur_log = 0 while os.path.isfile(self._log_file_name(cur_log)): cur_log += 1 return (cur_log - 1)
'Saves this episode to the database if any of its data has been changed since the last save. forceSave: If True it will save to the database even if no data has been changed since the last save (aka if the record is not dirty).'
def saveToDB(self, forceSave=False):
if ((not self.dirty) and (not forceSave)): logger.log((str(self.show.tvdbid) + u': Not saving episode to db - record is not dirty'), logger.DEBUG) return logger.log((str(self.show.tvdbid) + u': Saving episode details to database'), logger.DEBUG) logger.log((u'STATUS IS ' + str(self.status)), logger.DEBUG) myDB = db.DBConnection() newValueDict = {'tvdbid': self.tvdbid, 'name': self.name, 'description': self.description, 'airdate': self.airdate.toordinal(), 'hasnfo': self.hasnfo, 'hastbn': self.hastbn, 'status': self.status, 'location': self.location, 'file_size': self.file_size, 'release_name': self.release_name} controlValueDict = {'showid': self.show.tvdbid, 'season': self.season, 'episode': self.episode} myDB.upsert('tv_episodes', newValueDict, controlValueDict)
'Returns the name of this episode in a "pretty" human-readable format. Used for logging and notifications and such. Returns: A string representing the episode\'s name and season/ep numbers'
def prettyName(self):
return self._format_pattern('%SN - %Sx%0E - %EN')
'Returns the name of the episode to use during renaming. Combines the names of related episodes. Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name" "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"'
def _ep_name(self):
multiNameRegex = '(.*) \\(\\d{1,2}\\)' self.relatedEps = sorted(self.relatedEps, key=(lambda x: x.episode)) if (len(self.relatedEps) == 0): goodName = self.name else: goodName = '' singleName = True curGoodName = None for curName in ([self.name] + [x.name for x in self.relatedEps]): match = re.match(multiNameRegex, curName) if (not match): singleName = False break if (curGoodName is None): curGoodName = match.group(1) elif (curGoodName != match.group(1)): singleName = False break if singleName: goodName = curGoodName else: goodName = self.name for relEp in self.relatedEps: goodName += (' & ' + relEp.name) return goodName
'Generates a replacement map for this episode which maps all possible custom naming patterns to the correct value for this episode. Returns: A dict with patterns as the keys and their replacement values as the values.'
def _replace_map(self):
ep_name = self._ep_name() def dot(name): return helpers.sanitizeSceneName(name) def us(name): return re.sub('[ -]', '_', name) def release_name(name): if name: name = helpers.remove_non_release_groups(helpers.remove_extension(name)) return name def release_group(name): if name: name = helpers.remove_non_release_groups(helpers.remove_extension(name)) else: return '' np = NameParser(False) try: parse_result = np.parse(name) except InvalidNameException as e: logger.log((u'Unable to get parse release_group: ' + ex(e)), logger.DEBUG) return '' if (not parse_result.release_group): return '' return parse_result.release_group (epStatus, epQual) = Quality.splitCompositeStatus(self.status) return {'%SN': self.show.name, '%S.N': dot(self.show.name), '%S_N': us(self.show.name), '%EN': ep_name, '%E.N': dot(ep_name), '%E_N': us(ep_name), '%QN': Quality.qualityStrings[epQual], '%Q.N': dot(Quality.qualityStrings[epQual]), '%Q_N': us(Quality.qualityStrings[epQual]), '%S': str(self.season), '%0S': ('%02d' % self.season), '%E': str(self.episode), '%0E': ('%02d' % self.episode), '%RN': release_name(self.release_name), '%RG': release_group(self.release_name), '%AD': str(self.airdate).replace('-', ' '), '%A.D': str(self.airdate).replace('-', '.'), '%A_D': us(str(self.airdate)), '%A-D': str(self.airdate), '%Y': str(self.airdate.year), '%M': str(self.airdate.month), '%D': str(self.airdate.day), '%0M': ('%02d' % self.airdate.month), '%0D': ('%02d' % self.airdate.day)}
'Replaces all template strings with the correct value'
def _format_string(self, pattern, replace_map):
result_name = pattern for cur_replacement in sorted(replace_map.keys(), reverse=True): result_name = result_name.replace(cur_replacement, helpers.sanitizeFileName(replace_map[cur_replacement])) result_name = result_name.replace(cur_replacement.lower(), helpers.sanitizeFileName(replace_map[cur_replacement].lower())) return result_name
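A minimal sketch of the substitution loop with a toy replacement map (sanitizeFileName is omitted here); reverse-sorting the keys makes longer tokens such as %SN win over the shorter %S, and each token also gets a lower-case variant substituted with the lower-cased value:
replace_map = {'%SN': 'Show Name', '%S': '3', '%0E': '07'}   # toy values, not a real map
pattern = '%SN - %Sx%0E'
result = pattern
for token in sorted(replace_map, reverse=True):
    result = result.replace(token, replace_map[token])
    result = result.replace(token.lower(), replace_map[token].lower())
print(result)   # Show Name - 3x07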
'Manipulates an episode naming pattern and then fills the template in'
def _format_pattern(self, pattern=None, multi=None, debug=False):
if (pattern is None): pattern = sickbeard.NAMING_PATTERN if (multi is None): multi = sickbeard.NAMING_MULTI_EP replace_map = self._replace_map() result_name = pattern if (not replace_map['%RN']): if self.show.air_by_date: result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SiCKBEARD') result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-sickbeard') else: result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SiCKBEARD') result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-sickbeard') if debug: logger.log((u'Episode has no release name, replacing it with a generic one: ' + result_name), logger.DEBUG) if (not replace_map['%RG']): result_name = result_name.replace('%RG', 'SiCKBEARD') result_name = result_name.replace('%rg', 'sickbeard') if debug: logger.log((u'Episode has no release group, replacing it with a generic one: ' + result_name), logger.DEBUG) name_groups = re.split('[\\\\/]', result_name) for cur_name_group in name_groups: season_format = sep = ep_sep = ep_format = None season_ep_regex = '\n (?P<pre_sep>[ _.-]*)\n ((?:s(?:eason|eries)?\\s*)?%0?S(?![._]?N))\n (.*?)\n (%0?E(?![._]?N))\n (?P<post_sep>[ _.-]*)\n ' ep_only_regex = '(E?%0?E(?![._]?N))' season_ep_match = re.search(season_ep_regex, cur_name_group, (re.I | re.X)) ep_only_match = re.search(ep_only_regex, cur_name_group, (re.I | re.X)) if season_ep_match: season_format = season_ep_match.group(2) ep_sep = season_ep_match.group(3) ep_format = season_ep_match.group(4) sep = season_ep_match.group('pre_sep') if (not sep): sep = season_ep_match.group('post_sep') if (not sep): sep = ' ' if (multi in (NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED)): ep_sep = '-' regex_used = season_ep_regex elif ep_only_match: season_format = '' ep_sep = '-' ep_format = ep_only_match.group(1) sep = '' regex_used = ep_only_regex else: continue if ((not ep_sep) or (not ep_format)): continue ep_string = self._format_string(ep_format.upper(), replace_map) for other_ep in self.relatedEps: if ((multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED)) and (other_ep != self.relatedEps[(-1)])): continue elif (multi == NAMING_DUPLICATE): ep_string += (sep + season_format) elif (multi == NAMING_SEPARATED_REPEAT): ep_string += sep ep_string += ep_sep if (multi == NAMING_LIMITED_EXTEND_E_PREFIXED): ep_string += 'E' ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map()) if season_ep_match: regex_replacement = (('\\g<pre_sep>\\g<2>\\g<3>' + ep_string) + '\\g<post_sep>') elif ep_only_match: regex_replacement = ep_string cur_name_group_result = re.sub(('(?i)(?x)' + regex_used), regex_replacement, cur_name_group) result_name = result_name.replace(cur_name_group, cur_name_group_result) result_name = self._format_string(result_name, replace_map) if debug: logger.log((((u'formatting pattern: ' + pattern) + ' -> ') + result_name), logger.DEBUG) return result_name
'Figures out the path where this episode SHOULD live according to the renaming rules, relative from the show dir'
def proper_path(self):
result = self.formatted_filename() if (self.show.flatten_folders and (not sickbeard.NAMING_FORCE_FOLDERS)): return result else: result = ek.ek(os.path.join, self.formatted_dir(), result) return result
'Just the folder name of the episode'
def formatted_dir(self, pattern=None, multi=None, debug=False):
if (pattern is None): if (self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and (not self.relatedEps)): pattern = sickbeard.NAMING_ABD_PATTERN else: pattern = sickbeard.NAMING_PATTERN name_groups = re.split('[\\\\/]', pattern) if (len(name_groups) == 1): return '' else: return self._format_pattern(os.sep.join(name_groups[:(-1)]), multi, debug)
'Just the filename of the episode, formatted based on the naming settings'
def formatted_filename(self, pattern=None, multi=None, debug=False):
if (pattern is None): if (self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and (not self.relatedEps)): pattern = sickbeard.NAMING_ABD_PATTERN else: pattern = sickbeard.NAMING_PATTERN name_groups = re.split('[\\\\/]', pattern) return self._format_pattern(name_groups[(-1)], multi, debug)
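A small illustration of how the pattern is split: everything before the last path separator becomes folders (see formatted_dir above) and the last group becomes the file name (the pattern below is just an example, not a default):
import re
pattern = 'Season %0S/%SN - %Sx%0E - %EN'
name_groups = re.split(r'[\\/]', pattern)
# name_groups[:-1] -> ['Season %0S']             (folder part)
# name_groups[-1]  -> '%SN - %Sx%0E - %EN'       (file name part)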
'Renames an episode file and all related files to the location and filename as specified in the naming settings.'
def rename(self):
if (not ek.ek(os.path.isfile, self.location)): logger.log(((u"Can't perform rename on " + self.location) + " when it doesn't exist, skipping"), logger.WARNING) return proper_path = self.proper_path() absolute_proper_path = ek.ek(os.path.join, self.show.location, proper_path) (absolute_current_path_no_ext, file_ext) = ek.ek(os.path.splitext, self.location) absolute_current_path_no_ext_length = len(absolute_current_path_no_ext) current_path = absolute_current_path_no_ext if absolute_current_path_no_ext.startswith(self.show.location): current_path = absolute_current_path_no_ext[len(self.show.location):] logger.log((((u'Renaming/moving episode from the base path ' + self.location) + ' to ') + absolute_proper_path), logger.DEBUG) if (proper_path == current_path): logger.log((((str(self.tvdbid) + u': File ') + self.location) + ' is already named correctly, skipping'), logger.DEBUG) return related_files = helpers.list_associated_files(self.location, base_name_only=True) logger.log((((u'Files associated to ' + self.location) + ': ') + str(related_files)), logger.DEBUG) result = helpers.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length) for cur_related_file in related_files: cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path, absolute_current_path_no_ext_length) if (cur_result is False): logger.log(((str(self.tvdbid) + u': Unable to rename file ') + cur_related_file), logger.ERROR) with self.lock: if (result is not False): self.location = (absolute_proper_path + file_ext) for relEp in self.relatedEps: relEp.location = (absolute_proper_path + file_ext) for curEp in ([self] + self.relatedEps): curEp.checkForMetaFiles() with self.lock: self.saveToDB() for relEp in self.relatedEps: relEp.saveToDB()
'Implementing classes should call this'
def execute(self):
self.inProgress = True
'Implementing classes should call this'
def finish(self):
self.inProgress = False
'Read the request body into fp_out (or make_file() if None). Return fp_out.'
def read_into_file(self, fp_out=None):
if (fp_out is None): fp_out = self.make_file() self.read(fp_out=fp_out) return fp_out
'Return a file into which the request body will be read. By default, this will return a TemporaryFile. Override as needed.'
def make_file(self):
return tempfile.TemporaryFile()
'Return this entity as a string, whether stored in a file or not.'
def fullvalue(self):
if self.file: self.file.seek(0) value = self.file.read() self.file.seek(0) else: value = self.value return value
'Execute the best-match processor for the given media type.'
def process(self):
proc = None ct = self.content_type.value try: proc = self.processors[ct] except KeyError: toptype = ct.split(u'/', 1)[0] try: proc = self.processors[toptype] except KeyError: pass if (proc is None): self.default_proc() else: proc(self)
'Read bytes from self.fp and return or write them to a file. If the \'fp_out\' argument is None (the default), all bytes read are returned in a single byte string. If the \'fp_out\' argument is not None, it must be a file-like object that supports the \'write\' method; all bytes read will be written to the fp, and that fp is returned.'
def read_lines_to_boundary(self, fp_out=None):
endmarker = (self.boundary + '--') delim = '' prev_lf = True lines = [] seen = 0 while True: line = self.fp.readline((1 << 16)) if (not line): raise EOFError(u'Illegal end of multipart body.') if (line.startswith('--') and prev_lf): strippedline = line.strip() if (strippedline == self.boundary): break if (strippedline == endmarker): self.fp.finish() break line = (delim + line) if line.endswith('\r\n'): delim = '\r\n' line = line[:(-2)] prev_lf = True elif line.endswith('\n'): delim = '\n' line = line[:(-1)] prev_lf = True else: delim = '' prev_lf = False if (fp_out is None): lines.append(line) seen += len(line) if (seen > self.maxrambytes): fp_out = self.make_file() for line in lines: fp_out.write(line) else: fp_out.write(line) if (fp_out is None): result = ''.join(lines) for charset in self.attempt_charsets: try: result = result.decode(charset) except UnicodeDecodeError: pass else: self.charset = charset return result else: raise cherrypy.HTTPError(400, ('The request entity could not be decoded. The following charsets were attempted: %s' % repr(self.attempt_charsets))) else: fp_out.seek(0) return fp_out
'Read the request body into fp_out (or make_file() if None). Return fp_out.'
def read_into_file(self, fp_out=None):
if (fp_out is None): fp_out = self.make_file() self.read_lines_to_boundary(fp_out=fp_out) return fp_out
'Read bytes from the request body and return or write them to a file. A number of bytes less than or equal to the \'size\' argument are read off the socket. The actual number of bytes read are tracked in self.bytes_read. The number may be smaller than \'size\' when 1) the client sends fewer bytes, 2) the \'Content-Length\' request header specifies fewer bytes than requested, or 3) the number of bytes read exceeds self.maxbytes (in which case, 413 is raised). If the \'fp_out\' argument is None (the default), all bytes read are returned in a single byte string. If the \'fp_out\' argument is not None, it must be a file-like object that supports the \'write\' method; all bytes read will be written to the fp, and None is returned.'
def read(self, size=None, fp_out=None):
if (self.length is None): if (size is None): remaining = inf else: remaining = size else: remaining = (self.length - self.bytes_read) if (size and (size < remaining)): remaining = size if (remaining == 0): self.finish() if (fp_out is None): return '' else: return None chunks = [] if self.buffer: if (remaining is inf): data = self.buffer self.buffer = '' else: data = self.buffer[:remaining] self.buffer = self.buffer[remaining:] datalen = len(data) remaining -= datalen self.bytes_read += datalen if (self.maxbytes and (self.bytes_read > self.maxbytes)): raise cherrypy.HTTPError(413) if (fp_out is None): chunks.append(data) else: fp_out.write(data) while (remaining > 0): chunksize = min(remaining, self.bufsize) try: data = self.fp.read(chunksize) except Exception as e: if (e.__class__.__name__ == 'MaxSizeExceeded'): raise cherrypy.HTTPError(413, ('Maximum request length: %r' % e.args[1])) else: raise if (not data): self.finish() break datalen = len(data) remaining -= datalen self.bytes_read += datalen if (self.maxbytes and (self.bytes_read > self.maxbytes)): raise cherrypy.HTTPError(413) if (fp_out is None): chunks.append(data) else: fp_out.write(data) if (fp_out is None): return ''.join(chunks)
'Read a line from the request body and return it.'
def readline(self, size=None):
chunks = [] while ((size is None) or (size > 0)): chunksize = self.bufsize if ((size is not None) and (size < self.bufsize)): chunksize = size data = self.read(chunksize) if (not data): break pos = (data.find('\n') + 1) if pos: chunks.append(data[:pos]) remainder = data[pos:] self.buffer += remainder self.bytes_read -= len(remainder) break else: chunks.append(data) return ''.join(chunks)