desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
def get_broken_dependencies(self):
    """For API-compatibility with RepositoryProvider"""

    # This provider never tracks broken dependencies
    empty = {}
    return empty.items()
|
def get_dependencies(self):
    """For API-compatibility with RepositoryProvider"""

    # No dependency support here; yield the empty view a dict would give
    no_dependencies = {}
    return no_dependencies.items()
|
def get_packages(self, invalid_sources=None):
    """
    Uses the GitHub API to construct necessary info for all packages

    :param invalid_sources:
        A list of URLs that should be ignored

    :raises:
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info

    :return:
        A generator of ('Package Name', {package info dict}) tuples
    """

    if 'get_packages' in self.cache:
        for key, value in self.cache['get_packages'].items():
            yield (key, value)
        return

    client = GitHubClient(self.settings)

    if invalid_sources is not None and self.repo in invalid_sources:
        # Bug fix: "raise StopIteration()" inside a generator becomes a
        # RuntimeError under PEP 479 (Python 3.7+); a bare return is the
        # correct way to end the generator early.
        return

    try:
        user_repos = client.user_info(self.repo)
    except (DownloaderException, ClientException, ProviderException) as e:
        # Bug fix: failed_sources is a mapping of source URL -> exception
        # (get_failed_sources() calls .items() on it, and the per-repo
        # handler below assigns dict-style), so assigning a list here would
        # break that contract.
        self.failed_sources[self.repo] = e
        # Bug fix: cache an empty dict rather than the exception object so
        # a repeated call does not crash calling .items() on an Exception.
        self.cache['get_packages'] = {}
        raise e

    output = {}
    for repo_info in user_repos:
        try:
            name = repo_info['name']
            repo_url = 'https://github.com/%s/%s' % (repo_info['author'], name)

            releases = []
            for download in client.download_info(repo_url):
                # User-level listings have no version metadata, so every
                # release is marked as universal
                download['sublime_text'] = '*'
                download['platforms'] = ['*']
                releases.append(download)

            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': releases[0].get('date'),
                'releases': releases,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            output[name] = details
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            self.failed_sources[repo_url] = e

    self.cache['get_packages'] = output
|
def get_sources(self):
    """
    Return a list of current URLs that are directly referenced by the repo

    :return:
        A list of URLs
    """

    # The user URL itself is the only source
    sources = [self.repo]
    return sources
|
def get_renamed_packages(self):
    """For API-compatibility with RepositoryProvider"""

    # Rename tracking is only meaningful for repository files
    return dict()
|
@classmethod
def match_url(cls, repo):
    """Indicates if this provider can handle the provided repo"""

    # Catch-all provider: every URL is accepted
    return True
|
def prefetch(self):
    """
    Go out and perform HTTP operations, caching the result

    :raises:
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info
    """

    # Consume the generator for its caching side effect; the original built
    # a throwaway list via a comprehension, which wastes memory and obscures
    # the intent.
    for _name, _info in self.get_packages():
        pass
|
def get_failed_sources(self):
    """
    List of any URLs that could not be accessed while accessing this repository

    :return:
        A generator of ("https://example.com", Exception()) tuples
    """

    failures = self.failed_sources
    return failures.items()
|
def get_broken_packages(self):
    """
    List of package names for packages that are missing information

    :return:
        A generator of ("Package Name", Exception()) tuples
    """

    broken = self.broken_packages
    return broken.items()
|
def get_broken_dependencies(self):
    """
    List of dependency names for dependencies that are missing information

    :return:
        A generator of ("Dependency Name", Exception()) tuples
    """

    broken = self.broken_dependencies
    return broken.items()
|
def fetch(self):
    """
    Retrieves and loads the JSON for other methods to use

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when an error occurs trying to open a URL
    """

    if self.repo_info is not None:
        return

    self.repo_info = self.fetch_location(self.repo)

    # Guarantee both top-level collections exist for later methods
    for key in ['packages', 'dependencies']:
        if key not in self.repo_info:
            self.repo_info[key] = []

    if 'includes' not in self.repo_info:
        return

    # Relative includes resolve against the repository location, which is
    # either a URL or a local file path
    is_http = re.match('https?://', self.repo, re.I) is not None
    if not is_http:
        relative_base = os.path.dirname(self.repo)

    includes = self.repo_info.get('includes', [])
    del self.repo_info['includes']
    for include in includes:
        if re.match('^\\./|\\.\\./', include):
            if is_http:
                include = urljoin(self.repo, include)
            else:
                include = os.path.normpath(os.path.join(relative_base, include))
        include_info = self.fetch_location(include)
        self.repo_info['packages'].extend(include_info.get('packages', []))
        self.repo_info['dependencies'].extend(include_info.get('dependencies', []))
|
def fetch_and_validate(self):
    """
    Fetch the repository and validates that it is parse-able

    :return:
        Boolean if the repo was fetched and validated
    """

    if self.repo in self.failed_sources:
        return False
    if self.repo_info is not None:
        return True

    try:
        self.fetch()
    except (DownloaderException, ProviderException) as e:
        self.failed_sources[self.repo] = e
        self.cache['get_packages'] = {}
        return False

    def fail(message):
        # Record the validation failure and poison the packages cache so
        # subsequent calls short-circuit
        exception = ProviderException(message)
        self.failed_sources[self.repo] = exception
        self.cache['get_packages'] = {}
        return

    schema_error = u'Repository %s does not appear to be a valid repository file because ' % self.repo

    if 'schema_version' not in self.repo_info:
        fail(u'%s the "schema_version" JSON key is missing.' % schema_error)
        return False

    try:
        # Normalize numeric schema versions to their string form
        self.schema_version = self.repo_info.get('schema_version')
        if isinstance(self.schema_version, int):
            self.schema_version = float(self.schema_version)
        if isinstance(self.schema_version, float):
            self.schema_version = str_cls(self.schema_version)
    except ValueError:
        fail(u'%s the "schema_version" is not a valid number.' % schema_error)
        return False

    if self.schema_version not in ['1.0', '1.1', '1.2', '2.0', '3.0.0']:
        fail(text.format(u'\n %s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2, 2.0 or 3.0.0.\n ', schema_error))
        return False

    self.schema_major_version = int(self.schema_version.split('.')[0])

    if 'packages' not in self.repo_info:
        fail(u'%s the "packages" JSON key is missing.' % schema_error)
        return False

    if isinstance(self.repo_info['packages'], dict):
        fail(text.format(u'\n %s the "packages" key is an object, not an array. This indicates it is a channel not a repository.\n ', schema_error))
        return False

    return True
|
def fetch_location(self, location):
    """
    Fetches the contents of a URL of file path

    :param location:
        The URL or file path

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A dict of the parsed JSON
    """

    # Bug fix: decide URL-vs-file based on the location being fetched, not
    # on self.repo — an "includes" entry may be a local file path while the
    # repository itself is a URL (or vice versa).
    if re.match('https?://', location, re.I):
        with downloader(location, self.settings) as manager:
            json_string = manager.fetch(location, 'Error downloading repository.')
    else:
        if not os.path.exists(location):
            raise ProviderException(u'Error, file %s does not exist' % location)
        if self.settings.get('debug'):
            console_write(u'\n Loading %s as a repository\n ', location)
        # Read as bytes and decode explicitly so the file's encoding is
        # handled the same way as downloaded content
        with open(location, 'rb') as f:
            json_string = f.read()

    try:
        return json.loads(json_string.decode('utf-8'))
    except ValueError:
        raise ProviderException(u'Error parsing JSON from repository %s.' % location)
|
def get_dependencies(self, invalid_sources=None):
    """
    Provides access to the dependencies in this repository

    :param invalid_sources:
        A list of URLs that are permissible to fetch data from

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info

    :return:
        A generator of ('Dependency Name', {dependency info dict}) tuples
    """

    if 'get_dependencies' in self.cache:
        for key, value in self.cache['get_dependencies'].items():
            yield (key, value)
        return

    if invalid_sources is not None and self.repo in invalid_sources:
        # Bug fix: "raise StopIteration()" inside a generator becomes a
        # RuntimeError under PEP 479 (Python 3.7+); a bare return is the
        # correct way to end the generator early.
        return

    if not self.fetch_and_validate():
        return

    debug = self.settings.get('debug')

    github_client = GitHubClient(self.settings)
    bitbucket_client = BitBucketClient(self.settings)

    # Schemas before 3.0.0 have no concept of dependencies
    if self.schema_major_version < 3:
        self.repo_info['dependencies'] = []

    output = {}
    for dependency in self.repo_info['dependencies']:
        info = {'sources': [self.repo]}

        for field in ['name', 'description', 'author', 'issues', 'load_order']:
            if dependency.get(field):
                info[field] = dependency.get(field)

        if 'name' not in info:
            self.failed_sources[self.repo] = ProviderException(text.format(u'\n No "name" value for one of the dependencies in the repository %s.\n ', self.repo))
            continue

        releases = dependency.get('releases', [])
        if releases and not isinstance(releases, list):
            self.broken_dependencies[info['name']] = ProviderException(text.format(u'\n The "releases" value is not an array for the dependency "%s" in the repository %s.\n ', (info['name'], self.repo)))
            continue

        for release in releases:
            if 'releases' not in info:
                info['releases'] = []

            download_info = {}

            for field in ['platforms', 'sublime_text', 'version', 'url', 'sha256']:
                if field in release:
                    value = release[field]
                    if field == 'url':
                        value = update_url(value, debug)
                    # A single platform string is normalized to a list
                    if field == 'platforms' and not isinstance(release['platforms'], list):
                        value = [value]
                    download_info[field] = value

            if 'platforms' not in download_info:
                download_info['platforms'] = ['*']

            tags = release.get('tags')
            branch = release.get('branch')

            if tags or branch:
                try:
                    base = None
                    if 'base' in release:
                        base = release['base']
                    if not base:
                        raise ProviderException(text.format(u'\n Missing release-level "base" key for one of the releases of the\n dependency "%s" in the repository %s.\n ', (info['name'], self.repo)))

                    github_url = False
                    bitbucket_url = False
                    extra = None

                    if tags:
                        github_url = github_client.make_tags_url(base)
                        bitbucket_url = bitbucket_client.make_tags_url(base)
                        if tags is not True:
                            # A non-boolean truthy value selects a tag prefix
                            extra = tags

                    if branch:
                        github_url = github_client.make_branch_url(base, branch)
                        bitbucket_url = bitbucket_client.make_branch_url(base, branch)

                    if github_url:
                        downloads = github_client.download_info(github_url, extra)
                        url = github_url
                    elif bitbucket_url:
                        downloads = bitbucket_client.download_info(bitbucket_url, extra)
                        url = bitbucket_url
                    else:
                        raise ProviderException(text.format(u'\n Invalid "base" value "%s" for one of the releases of the\n dependency "%s" in the repository %s.\n ', (base, info['name'], self.repo)))

                    if downloads is False:
                        raise ProviderException(text.format(u'\n No valid semver tags found at %s for the dependency\n "%s" in the repository %s.\n ', (url, info['name'], self.repo)))

                    for download in downloads:
                        # Dependencies do not track a release date
                        del download['date']
                        new_download = download_info.copy()
                        new_download.update(download)
                        info['releases'].append(new_download)

                except (DownloaderException, ClientException, ProviderException) as e:
                    self.broken_dependencies[info['name']] = e
                    continue

            elif download_info:
                if 'url' in download_info:
                    # Plain-http downloads must carry a hash so their
                    # integrity can be verified
                    is_http = urlparse(download_info['url']).scheme == 'http'
                    if is_http and 'sha256' not in download_info:
                        self.broken_dependencies[info['name']] = ProviderException(text.format(u'\n No "sha256" key for the non-secure "url" value in one of the\n releases of the dependency "%s" in the repository %s.\n ', (info['name'], self.repo)))
                        continue
                info['releases'].append(download_info)

        if info['name'] in self.broken_dependencies:
            continue

        def is_missing_keys():
            # Flags the dependency as broken if any required key is absent,
            # at either the dependency or release level
            for key in ['author', 'releases', 'issues', 'description', 'load_order']:
                if key not in info:
                    self.broken_dependencies[info['name']] = ProviderException(text.format(u'\n No "%s" key for the dependency "%s" in the repository %s.\n ', (key, info['name'], self.repo)))
                    return True
            for release in info.get('releases', []):
                for key in ['version', 'url', 'sublime_text', 'platforms']:
                    if key not in release:
                        self.broken_dependencies[info['name']] = ProviderException(text.format(u'\n Missing "%s" key for one of the releases of the dependency "%s" in the repository %s.\n ', (key, info['name'], self.repo)))
                        return True
            return False

        if is_missing_keys():
            continue

        info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)

        output[info['name']] = info
        yield (info['name'], info)

    self.cache['get_dependencies'] = output
|
def get_packages(self, invalid_sources=None):
    """
    Provides access to the packages in this repository

    :param invalid_sources:
        A list of URLs that are permissible to fetch data from

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info

    :return:
        A generator of ('Package Name', {package info dict}) tuples
    """

    if 'get_packages' in self.cache:
        for key, value in self.cache['get_packages'].items():
            yield (key, value)
        return

    if invalid_sources is not None and self.repo in invalid_sources:
        # Bug fix: "raise StopIteration()" inside a generator becomes a
        # RuntimeError under PEP 479 (Python 3.7+); a bare return is the
        # correct way to end the generator early.
        return

    if not self.fetch_and_validate():
        return

    debug = self.settings.get('debug')

    github_client = GitHubClient(self.settings)
    bitbucket_client = BitBucketClient(self.settings)

    # Map of new name -> [old names] built from schema-1.x rename data
    previous_names = {}
    if self.schema_major_version < 2:
        renamed = self.get_renamed_packages()
        for old_name in renamed:
            new_name = renamed[old_name]
            if new_name not in previous_names:
                previous_names[new_name] = []
            previous_names[new_name].append(old_name)

    output = {}
    for package in self.repo_info['packages']:
        info = {'sources': [self.repo]}

        copy_fields = [
            'name', 'description', 'author', 'last_modified', 'previous_names',
            'labels', 'homepage', 'readme', 'issues', 'donate', 'buy'
        ]
        for field in copy_fields:
            if package.get(field):
                info[field] = package.get(field)

        if self.schema_major_version >= 2:
            details = package.get('details')
            releases = package.get('releases')

            # Pull package metadata from the GitHub/BitBucket APIs
            if details:
                if invalid_sources is not None and details in invalid_sources:
                    continue

                info['sources'].append(details)

                try:
                    github_repo_info = github_client.repo_info(details)
                    bitbucket_repo_info = bitbucket_client.repo_info(details)

                    # Values explicitly present in the repository JSON take
                    # precedence over what the API reports
                    if github_repo_info:
                        info = dict(chain(github_repo_info.items(), info.items()))
                    elif bitbucket_repo_info:
                        info = dict(chain(bitbucket_repo_info.items(), info.items()))
                    else:
                        raise ProviderException(text.format(u'\n Invalid "details" value "%s" for one of the packages in the repository %s.\n ', (details, self.repo)))

                except (DownloaderException, ClientException, ProviderException) as e:
                    if 'name' in info:
                        self.broken_packages[info['name']] = e
                    self.failed_sources[details] = e
                    continue

        if 'name' not in info:
            self.failed_sources[self.repo] = ProviderException(text.format(u'\n No "name" value for one of the packages in the repository %s.\n ', self.repo))
            continue

        info['releases'] = []

        if self.schema_major_version == 2:
            # Schema 2.0 allowed the root-level "details" to imply a release
            if not releases and details:
                releases = [{'details': details}]

        if self.schema_major_version >= 2:
            if not releases:
                e = ProviderException(text.format(u'\n No "releases" value for the package "%s" in the repository %s.\n ', (info['name'], self.repo)))
                self.broken_packages[info['name']] = e
                continue

            if not isinstance(releases, list):
                e = ProviderException(text.format(u'\n The "releases" value is not an array or the package "%s" in the repository %s.\n ', (info['name'], self.repo)))
                self.broken_packages[info['name']] = e
                continue

            for release in releases:
                download_details = None
                download_info = {}

                for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'dependencies']:
                    if field in release:
                        value = release[field]
                        if field == 'url':
                            value = update_url(value, debug)
                        # A single platform string is normalized to a list
                        if field == 'platforms' and not isinstance(release['platforms'], list):
                            value = [value]
                        download_info[field] = value

                if 'platforms' not in download_info:
                    download_info['platforms'] = ['*']

                if self.schema_major_version == 2:
                    if 'sublime_text' not in download_info:
                        # Schema 2.0 predates Sublime Text 3 support
                        download_info['sublime_text'] = '<3000'

                    if 'details' in release:
                        download_details = release['details']

                        try:
                            github_downloads = github_client.download_info(download_details)
                            bitbucket_downloads = bitbucket_client.download_info(download_details)

                            if github_downloads is False or bitbucket_downloads is False:
                                raise ProviderException(text.format(u'\n No valid semver tags found at %s for the package "%s" in the repository %s.\n ', (download_details, info['name'], self.repo)))

                            if github_downloads:
                                downloads = github_downloads
                            elif bitbucket_downloads:
                                downloads = bitbucket_downloads
                            else:
                                raise ProviderException(text.format(u'\n Invalid "details" value "%s" under the "releases" key\n for the package "%s" in the repository %s.\n ', (download_details, info['name'], self.repo)))

                            for download in downloads:
                                new_download = download_info.copy()
                                new_download.update(download)
                                info['releases'].append(new_download)

                        except (DownloaderException, ClientException, ProviderException) as e:
                            self.broken_packages[info['name']] = e

                    elif download_info:
                        info['releases'].append(download_info)

                elif self.schema_major_version == 3:
                    tags = release.get('tags')
                    branch = release.get('branch')

                    if tags or branch:
                        try:
                            base = None
                            if 'base' in release:
                                base = release['base']
                            elif details:
                                # Fall back to the root-level "details" URL
                                base = details

                            if not base:
                                raise ProviderException(text.format(u'\n Missing root-level "details" key, or release-level "base" key\n for one of the releases of the package "%s" in the repository %s.\n ', (info['name'], self.repo)))

                            github_url = False
                            bitbucket_url = False
                            extra = None

                            if tags:
                                github_url = github_client.make_tags_url(base)
                                bitbucket_url = bitbucket_client.make_tags_url(base)
                                if tags is not True:
                                    # A non-boolean truthy value is a tag prefix
                                    extra = tags

                            if branch:
                                github_url = github_client.make_branch_url(base, branch)
                                bitbucket_url = bitbucket_client.make_branch_url(base, branch)

                            if github_url:
                                downloads = github_client.download_info(github_url, extra)
                                url = github_url
                            elif bitbucket_url:
                                downloads = bitbucket_client.download_info(bitbucket_url, extra)
                                url = bitbucket_url
                            else:
                                raise ProviderException(text.format(u'\n Invalid "base" value "%s" for one of the releases of the\n package "%s" in the repository %s.\n ', (base, info['name'], self.repo)))

                            if downloads is False:
                                raise ProviderException(text.format(u'\n No valid semver tags found at %s for the\n package "%s" in the repository %s.\n ', (url, info['name'], self.repo)))

                            for download in downloads:
                                new_download = download_info.copy()
                                new_download.update(download)
                                info['releases'].append(new_download)

                        except (DownloaderException, ClientException, ProviderException) as e:
                            self.broken_packages[info['name']] = e
                            continue

                    elif download_info:
                        info['releases'].append(download_info)

        else:
            # Schema 1.x stored per-platform URLs instead of releases
            info['releases'] = platforms_to_releases(package, debug)

        info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)

        if info['name'] in self.broken_packages:
            continue

        if 'author' not in info:
            self.broken_packages[info['name']] = ProviderException(text.format(u'\n No "author" key for the package "%s" in the repository %s.\n ', (info['name'], self.repo)))
            continue

        if 'releases' not in info:
            self.broken_packages[info['name']] = ProviderException(text.format(u'\n No "releases" key for the package "%s" in the repository %s.\n ', (info['name'], self.repo)))
            continue

        def has_broken_release():
            # Every release must carry the full set of required keys
            for release in info.get('releases', []):
                for key in ['version', 'date', 'url', 'sublime_text', 'platforms']:
                    if key not in release:
                        self.broken_packages[info['name']] = ProviderException(text.format(u'\n Missing "%s" key for one of the releases of the package "%s" in the repository %s.\n ', (key, info['name'], self.repo)))
                        return True
            return False

        if has_broken_release():
            continue

        for field in ['previous_names', 'labels']:
            if field not in info:
                info[field] = []

        if 'readme' in info:
            info['readme'] = update_url(info['readme'], debug)

        for field in ['description', 'readme', 'issues', 'donate', 'buy']:
            if field not in info:
                info[field] = None

        if 'homepage' not in info:
            info['homepage'] = self.repo

        if 'releases' in info and 'last_modified' not in info:
            # Use the most recent release date as the last modified date
            date = '1970-01-01 00:00:00'
            for release in info['releases']:
                if 'date' in release and release['date'] > date:
                    date = release['date']
            info['last_modified'] = date

        if info['name'] in previous_names:
            info['previous_names'].extend(previous_names[info['name']])

        output[info['name']] = info
        yield (info['name'], info)

    self.cache['get_packages'] = output
|
def get_sources(self):
    """
    Return a list of current URLs that are directly referenced by the repo

    :return:
        A list of URLs and/or file paths
    """

    if not self.fetch_and_validate():
        return []

    sources = [self.repo]
    # Schema 2.0+ packages may also reference an external "details" URL
    if self.schema_major_version >= 2:
        for package in self.repo_info['packages']:
            details = package.get('details')
            if details:
                sources.append(details)
    return sources
|
def get_renamed_packages(self):
    """:return: A dict of the packages that have been renamed"""

    if not self.fetch_and_validate():
        return {}

    # Schema 1.x kept an explicit top-level mapping
    if self.schema_major_version < 2:
        return self.repo_info.get('renamed_packages', {})

    # Schema 2.0+ derives the mapping from per-package "previous_names"
    renamed = {}
    for package in self.repo_info['packages']:
        if 'previous_names' not in package:
            continue
        old_names = package['previous_names']
        if not isinstance(old_names, list):
            old_names = [old_names]
        for old_name in old_names:
            renamed[old_name] = package['name']
    return renamed
|
@classmethod
def match_url(cls, channel):
    """Indicates if this provider can handle the provided channel"""

    # Catch-all provider: every channel URL is accepted
    return True
|
def prefetch(self):
    """
    Go out and perform HTTP operations, caching the result

    :raises:
        ProviderException: when an error occurs trying to open a file
        DownloaderException: when an error occurs trying to open a URL
    """

    # Eagerly load and cache the channel contents
    self.fetch()
|
def fetch(self):
    """
    Retrieves and loads the JSON for other methods to use

    :raises:
        ProviderException: when an error occurs with the channel contents
        DownloaderException: when an error occurs trying to open a URL
    """

    if self.channel_info is not None:
        return

    # The channel may live at a URL or on the local filesystem
    if re.match('https?://', self.channel, re.I):
        with downloader(self.channel, self.settings) as manager:
            channel_json = manager.fetch(self.channel, 'Error downloading channel.')
    else:
        if not os.path.exists(self.channel):
            raise ProviderException(u'Error, file %s does not exist' % self.channel)
        if self.settings.get('debug'):
            console_write(u'\n Loading %s as a channel\n ', self.channel)
        with open(self.channel, 'rb') as f:
            channel_json = f.read()

    try:
        channel_info = json.loads(channel_json.decode('utf-8'))
    except ValueError:
        raise ProviderException(u'Error parsing JSON from channel %s.' % self.channel)

    schema_error = u'Channel %s does not appear to be a valid channel file because ' % self.channel

    if 'schema_version' not in channel_info:
        raise ProviderException(u'%s the "schema_version" JSON key is missing.' % schema_error)

    try:
        # Normalize numeric schema versions to their string form
        self.schema_version = channel_info.get('schema_version')
        if isinstance(self.schema_version, int):
            self.schema_version = float(self.schema_version)
        if isinstance(self.schema_version, float):
            self.schema_version = str_cls(self.schema_version)
    except ValueError:
        raise ProviderException(u'%s the "schema_version" is not a valid number.' % schema_error)

    if self.schema_version not in ['1.0', '1.1', '1.2', '2.0', '3.0.0']:
        raise ProviderException(text.format(u'\n %s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2, 2.0 or 3.0.0.\n ', schema_error))

    self.schema_major_version = int(self.schema_version.split('.')[0])

    # Rewrite any moved repository URLs in the packages cache keys
    debug = self.settings.get('debug')
    packages_key = 'packages_cache' if self.schema_major_version >= 2 else 'packages'
    if packages_key in channel_info:
        original_cache = channel_info[packages_key]
        new_cache = {}
        for repo in original_cache:
            new_cache[update_url(repo, debug)] = original_cache[repo]
        channel_info[packages_key] = new_cache

    self.channel_info = channel_info
|
def get_name_map(self):
    """
    :raises:
        ProviderException: when an error occurs with the channel contents
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A dict of the mapping for URL slug -> package name
    """

    self.fetch()

    # Schema 2.0+ dropped the package_name_map concept entirely
    if self.schema_major_version < 2:
        return self.channel_info.get('package_name_map', {})
    return {}
|
def get_renamed_packages(self):
    """
    :raises:
        ProviderException: when an error occurs with the channel contents
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A dict of the packages that have been renamed
    """

    self.fetch()

    # Schema 1.x kept an explicit top-level mapping
    if self.schema_major_version < 2:
        return self.channel_info.get('renamed_packages', {})

    # Schema 2.0+ derives the mapping from each cached package entry
    renamed = {}
    for repo in self.channel_info.get('packages_cache', {}):
        for package in self.channel_info['packages_cache'][repo]:
            old_names = package.get('previous_names', [])
            if not isinstance(old_names, list):
                old_names = [old_names]
            for old_name in old_names:
                renamed[old_name] = package['name']
    return renamed
|
def get_repositories(self):
    """
    :raises:
        ProviderException: when an error occurs with the channel contents
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A list of the repository URLs
    """

    self.fetch()

    if 'repositories' not in self.channel_info:
        raise ProviderException(text.format(u'\n Channel %s does not appear to be a valid channel file because\n the "repositories" JSON key is missing.\n ', self.channel))

    # Relative entries resolve against the channel location, which is
    # either a URL or a local file path
    is_http = re.match('https?://', self.channel, re.I) is not None
    if not is_http:
        relative_base = os.path.dirname(self.channel)

    debug = self.settings.get('debug')
    output = []
    for repository in self.channel_info.get('repositories', []):
        if re.match('^\\./|\\.\\./', repository):
            if is_http:
                repository = urljoin(self.channel, repository)
            else:
                repository = os.path.normpath(os.path.join(relative_base, repository))
        output.append(update_url(repository, debug))
    return output
|
def get_sources(self):
    """
    Return a list of current URLs that are directly referenced by the
    channel

    :return:
        A list of URLs and/or file paths
    """

    # A channel's direct sources are exactly its repositories
    return self.get_repositories()
|
def get_packages(self, repo):
    """
    Provides access to the repository info that is cached in a channel

    :param repo:
        The URL of the repository to get the cached info of

    :raises:
        ProviderException: when an error occurs with the channel contents
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A dict of 'Package Name' -> {package info dict}
    """

    self.fetch()

    repo = update_url(repo, self.settings.get('debug'))

    packages_key = 'packages_cache' if self.schema_major_version >= 2 else 'packages'
    if self.channel_info.get(packages_key, False) is False:
        return {}
    if self.channel_info[packages_key].get(repo, False) is False:
        return {}

    output = {}
    for package in self.channel_info[packages_key][repo]:
        entry = package.copy()

        if self.schema_major_version < 2:
            # Schema 1.x stored per-platform URLs instead of releases
            entry['releases'] = platforms_to_releases(entry, self.settings.get('debug'))
            del entry['platforms']
        else:
            # Derive last_modified from the newest release date
            last_modified = None
            for release in entry.get('releases', []):
                date = release.get('date')
                if not last_modified or (date and date > last_modified):
                    last_modified = date
            entry['last_modified'] = last_modified

        defaults = {
            'buy': None,
            'issues': None,
            'labels': [],
            'previous_names': [],
            'readme': None,
            'donate': None
        }
        for field in defaults:
            if field not in entry:
                entry[field] = defaults[field]

        entry['releases'] = version_sort(entry['releases'], 'platforms', reverse=True)
        output[entry['name']] = entry

    return output
|
def get_dependencies(self, repo):
    """
    Provides access to the dependency info that is cached in a channel

    :param repo:
        The URL of the repository to get the cached info of

    :raises:
        ProviderException: when an error occurs with the channel contents
        DownloaderException: when an error occurs trying to open a URL

    :return:
        A dict of 'Dependency Name' -> {dependency info dict}
    """

    self.fetch()

    repo = update_url(repo, self.settings.get('debug'))

    dependencies_cache = self.channel_info.get('dependencies_cache', False)
    if dependencies_cache is False:
        return {}
    entries = dependencies_cache.get(repo, False)
    if entries is False:
        return {}

    result = {}
    for dependency in entries:
        dependency['releases'] = version_sort(dependency['releases'], 'platforms', reverse=True)
        result[dependency['name']] = dependency
    return result
|
@classmethod
def match_url(cls, repo):
    """Indicates if this provider can handle the provided repo"""

    # Accept either a GitHub repository root URL or a branch ("tree") URL
    is_master = re.search('^https?://github.com/[^/]+/[^/]+/?$', repo) is not None
    is_branch = re.search('^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$', repo) is not None
    return is_master or is_branch
|
def prefetch(self):
    """
    Go out and perform HTTP operations, caching the result

    :raises:
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info
    """

    # Consume the generator for its caching side effect; the original built
    # a throwaway list via a comprehension, which wastes memory and obscures
    # the intent.
    for _name, _info in self.get_packages():
        pass
|
def get_failed_sources(self):
    """
    List of any URLs that could not be accessed while accessing this repository

    :return:
        A generator of ("https://github.com/user/repo", Exception()) tuples
    """

    failures = self.failed_sources
    return failures.items()
|
def get_broken_packages(self):
    """For API-compatibility with RepositoryProvider"""

    # Single-repo providers never record broken packages
    empty = {}
    return empty.items()
|
def get_broken_dependencies(self):
    """For API-compatibility with RepositoryProvider"""

    # Dependencies are never tracked, so nothing can be broken
    empty = {}
    return empty.items()
|
def get_dependencies(self):
    """For API-compatibility with RepositoryProvider"""

    no_dependencies = {}
    return no_dependencies.items()
|
def get_packages(self, invalid_sources=None):
    """
    Uses the GitHub API to construct necessary info for a package

    :param invalid_sources:
        A list of URLs that should be ignored

    :raises:
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info

    :return:
        A generator of ('Package Name', {package info dict}) tuples
    """

    if 'get_packages' in self.cache:
        for key, value in self.cache['get_packages'].items():
            yield (key, value)
        return

    client = GitHubClient(self.settings)

    if invalid_sources is not None and self.repo in invalid_sources:
        # Bug fix: "raise StopIteration()" inside a generator becomes a
        # RuntimeError under PEP 479 (Python 3.7+); a bare return is the
        # correct way to end the generator early.
        return

    try:
        repo_info = client.repo_info(self.repo)

        releases = []
        for download in client.download_info(self.repo):
            # Standalone repos carry no compatibility metadata, so releases
            # are marked universal
            download['sublime_text'] = '*'
            download['platforms'] = ['*']
            releases.append(download)

        name = repo_info['name']
        details = {
            'name': name,
            'description': repo_info['description'],
            'homepage': repo_info['homepage'],
            'author': repo_info['author'],
            'last_modified': releases[0].get('date'),
            'releases': releases,
            'previous_names': [],
            'labels': [],
            'sources': [self.repo],
            'readme': repo_info['readme'],
            'issues': repo_info['issues'],
            'donate': repo_info['donate'],
            'buy': None
        }
        self.cache['get_packages'] = {name: details}
        yield (name, details)

    except (DownloaderException, ClientException, ProviderException) as e:
        self.failed_sources[self.repo] = e
        self.cache['get_packages'] = {}
        # Bug fix: same PEP 479 issue as above — finish the generator with
        # a return rather than raising StopIteration.
        return
|
def get_sources(self):
    """
    Return a list of current URLs that are directly referenced by the repo

    :return:
        A list of URLs
    """

    sources = [self.repo]
    return sources
|
def get_renamed_packages(self):
    """For API-compatibility with RepositoryProvider"""

    return dict()
|
@classmethod
def match_url(cls, repo):
    """Indicates if this provider can handle the provided repo"""

    # Only a BitBucket repository root URL is acceptable
    match = re.search('^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo)
    return match is not None
|
'Go out and perform HTTP operations, caching the result
:raises:
DownloaderException: when there is an issue download package info
ClientException: when there is an issue parsing package info'
def prefetch(self):
    """
    Go out and perform HTTP operations, caching the result

    :raises:
        DownloaderException: when there is an issue downloading package info
        ClientException: when there is an issue parsing package info
    """
    # Consume the generator fully so results land in the cache
    for _name, _info in self.get_packages():
        pass
|
'List of any URLs that could not be accessed while accessing this repository
:return:
A generator of ("https://bitbucket.org/user/repo", Exception()) tuples'
def get_failed_sources(self):
    """
    List of any URLs that could not be accessed for this repository

    :return:
        An iterable of (url, Exception()) tuples
    """
    failures = self.failed_sources
    return failures.items()
|
'For API-compatibility with RepositoryProvider'
def get_broken_packages(self):
    """For API-compatibility with RepositoryProvider - nothing breaks here."""
    return dict().items()
|
'For API-compatibility with RepositoryProvider'
def get_broken_dependencies(self):
    """For API-compatibility with RepositoryProvider - nothing breaks here."""
    return dict().items()
|
'For API-compatibility with RepositoryProvider'
def get_dependencies(self):
    """For API-compatibility with RepositoryProvider - none are provided."""
    return dict().items()
|
'Uses the BitBucket API to construct necessary info for a package
:param invalid_sources:
A list of URLs that should be ignored
:raises:
DownloaderException: when there is an issue download package info
ClientException: when there is an issue parsing package info
:return:
A generator of
\'Package Name\',
\'name\': name,
\'description\': description,
\'author\': author,
\'homepage\': homepage,
\'last_modified\': last modified date,
\'releases\': [
\'sublime_text\': \'*\',
\'platforms\': [\'*\'],
\'url\': url,
\'date\': date,
\'version\': version
\'previous_names\': [],
\'labels\': [],
\'sources\': [the repo URL],
\'readme\': url,
\'issues\': url,
\'donate\': url,
\'buy\': None
tuples'
def get_packages(self, invalid_sources=None):
    """
    Uses the BitBucket API to construct necessary info for a package

    :param invalid_sources:
        A list of URLs that should be ignored

    :raises:
        DownloaderException: when there is an issue downloading package info
        ClientException: when there is an issue parsing package info

    :return:
        A generator of ('Package Name', {details dict}) tuples
    """

    if 'get_packages' in self.cache:
        for (key, value) in self.cache['get_packages'].items():
            yield (key, value)
        return

    client = BitBucketClient(self.settings)

    if invalid_sources is not None and self.repo in invalid_sources:
        # Bug fix (PEP 479): raising StopIteration inside a generator is
        # converted to RuntimeError on Python 3.7+ - a bare return is the
        # correct way to end iteration
        return

    try:
        repo_info = client.repo_info(self.repo)

        releases = []
        for download in client.download_info(self.repo):
            # Single-repo sources carry no release metadata, so every
            # download is assumed to be compatible everywhere
            download['sublime_text'] = '*'
            download['platforms'] = ['*']
            releases.append(download)

        name = repo_info['name']
        details = {
            'name': name,
            'description': repo_info['description'],
            'homepage': repo_info['homepage'],
            'author': repo_info['author'],
            'last_modified': releases[0].get('date'),
            'releases': releases,
            'previous_names': [],
            'labels': [],
            'sources': [self.repo],
            'readme': repo_info['readme'],
            'issues': repo_info['issues'],
            'donate': repo_info['donate'],
            'buy': None
        }
        self.cache['get_packages'] = {name: details}
        yield (name, details)

    except (DownloaderException, ClientException, ProviderException) as e:
        # Record the failure for get_failed_sources() and cache an empty
        # result; end the generator with return (PEP 479)
        self.failed_sources[self.repo] = e
        self.cache['get_packages'] = {}
        return
|
'Return a list of current URLs that are directly referenced by the repo
:return:
A list of URLs'
def get_sources(self):
    """Return the list of URLs directly referenced by this provider."""
    return list((self.repo,))
|
'For API-compatibility with RepositoryProvider'
def get_renamed_packages(self):
    """For API-compatibility with RepositoryProvider - no renames here."""
    renamed = {}
    return renamed
|
'Loads a BER/DER-encoded byte string using the current class as the spec
:param encoded_data:
A byte string of BER or DER-encoded data
:param strict:
A boolean indicating if trailing data should be forbidden - if so, a
ValueError will be raised when trailing data exists
:return:
An instance of the current class'
@classmethod
def load(cls, encoded_data, strict=False, **kwargs):
    """
    Loads a BER/DER-encoded byte string using the current class as the spec

    :param encoded_data:
        A byte string of BER or DER-encoded data

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :return:
        An instance of the current class
    """
    if not isinstance(encoded_data, byte_cls):
        raise TypeError(u'encoded_data must be a byte string, not %s' % type_name(encoded_data))
    # Only use this class as the spec when it carries tag information
    spec = cls if cls.tag is not None else None
    value, _ = _parse_build(encoded_data, spec=spec, spec_params=kwargs, strict=strict)
    return value
|
'The optional parameter is not used, but rather included so we don\'t
have to delete it from the parameter dictionary when passing as keyword
args
:param tag_type:
None for normal values, or one of "implicit", "explicit" for tagged
values
:param class_:
The class for the value - defaults to "universal" if tag_type is
None, otherwise defaults to "context". Valid values include:
- "universal"
- "application"
- "context"
- "private"
:param tag:
The integer tag to override - usually this is used with tag_type or
class_
:param optional:
Dummy parameter that allows "optional" key in spec param dicts
:param default:
The default value to use if the value is currently None
:param contents:
A byte string of the encoded contents of the value
:raises:
ValueError - when tag_type, class_ or tag are invalid values'
def __init__(self, tag_type=None, class_=None, tag=None, optional=None, default=None, contents=None):
    """
    Sets tagging, contents or a default value on the object.

    :param tag_type:
        None for normal values, or one of "implicit", "explicit" for tagged
        values

    :param class_:
        The class for the value - defaults to "universal" if tag_type is
        None, otherwise defaults to "context". One of "universal",
        "application", "context", "private".

    :param tag:
        The integer tag to override - usually used with tag_type or class_

    :param optional:
        Dummy parameter that allows "optional" key in spec param dicts

    :param default:
        The default value to use if the value is currently None

    :param contents:
        A byte string of the encoded contents of the value

    :raises:
        ValueError - when tag_type, class_ or tag are invalid values
    """

    try:
        # One-time per-class initialization; _SETUP_CLASSES memoizes which
        # classes have already had their _setup() hook run
        if (self.__class__ not in _SETUP_CLASSES):
            cls = self.__class__
            if hasattr(cls, u'_setup'):
                self._setup()
            _SETUP_CLASSES[cls] = True
        if (tag_type is not None):
            if (tag_type not in (u'implicit', u'explicit')):
                raise ValueError(unwrap(u'\n tag_type must be one of "implicit", "explicit", not %s\n ', repr(tag_type)))
            self.tag_type = tag_type
            # Tagged values default to the "context" class
            if (class_ is None):
                class_ = u'context'
            if (class_ not in CLASS_NAME_TO_NUM_MAP):
                raise ValueError(unwrap(u'\n class_ must be one of "universal", "application",\n "context", "private", not %s\n ', repr(class_)))
            class_ = CLASS_NAME_TO_NUM_MAP[class_]
            if (tag is not None):
                if (not isinstance(tag, int_types)):
                    raise TypeError(unwrap(u'\n tag must be an integer, not %s\n ', type_name(tag)))
            # Implicit tagging replaces the class/tag of the value itself;
            # explicit tagging wraps the value, so it is stored separately
            if (tag_type == u'implicit'):
                self.class_ = class_
                self.tag = tag
            else:
                self.explicit_class = class_
                self.explicit_tag = tag
        else:
            if (class_ is not None):
                if (class_ not in CLASS_NUM_TO_NAME_MAP):
                    raise ValueError(unwrap(u'\n class_ must be one of "universal", "application",\n "context", "private", not %s\n ', repr(class_)))
                self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
            if (tag is not None):
                self.tag = tag
        # Pre-encoded contents take precedence over a default value
        if (contents is not None):
            self.contents = contents
        elif (default is not None):
            self.set(default)
    except (ValueError, TypeError) as e:
        # Add context about which type was being constructed, then re-raise
        args = e.args[1:]
        e.args = (((e.args[0] + (u'\n while constructing %s' % type_name(self))),) + args)
        raise e
|
'Since str is different in Python 2 and 3, this calls the appropriate
method, __unicode__() or __bytes__()
:return:
A unicode string'
def __str__(self):
    """
    Since str differs in Python 2 and 3, dispatch to the appropriate
    method: __bytes__() on Python 2, __unicode__() on Python 3.
    """
    return self.__bytes__() if _PY2 else self.__unicode__()
|
':return:
A unicode string'
def __repr__(self):
    """
    :return:
        A string showing the type, id and encoded bytes of this object
    """
    template = u'<%s %s b%s>' if _PY2 else u'<%s %s %s>'
    return template % (type_name(self), id(self), repr(self.dump()))
|
'A fall-back method for print() in Python 2
:return:
A byte string of the output of repr()'
def __bytes__(self):
    """Fall-back for print() in Python 2 - the repr, UTF-8 encoded."""
    rendered = self.__repr__()
    return rendered.encode(u'utf-8')
|
'A fall-back method for print() in Python 3
:return:
A unicode string of the output of repr()'
def __unicode__(self):
    """Fall-back for print() in Python 3 - identical to the repr output."""
    rendered = self.__repr__()
    return rendered
|
'Constructs a new copy of the current object, preserving any tagging
:return:
An Asn1Value object'
def _new_instance(self):
    """
    Constructs a new, empty copy of the current object, preserving any
    tagging attributes.

    :return:
        An Asn1Value object
    """
    duplicate = self.__class__()
    for attr_name in ('tag_type', 'class_', 'tag', 'explicit_class', 'explicit_tag'):
        setattr(duplicate, attr_name, getattr(self, attr_name))
    return duplicate
|
'Implements the copy.copy() interface
:return:
A new shallow copy of the current Asn1Value object'
def __copy__(self):
    """
    Implements the copy.copy() interface.

    :return:
        A new shallow copy of the current Asn1Value object
    """
    clone = self._new_instance()
    clone._copy(self, copy.copy)
    return clone
|
'Implements the copy.deepcopy() interface
:param memo:
A dict for memoization
:return:
A new deep copy of the current Asn1Value object'
def __deepcopy__(self, memo):
    """
    Implements the copy.deepcopy() interface.

    :param memo:
        A dict for memoization - this object is registered before copying
        so reference cycles are handled

    :return:
        A new deep copy of the current Asn1Value object
    """
    clone = self._new_instance()
    memo[id(self)] = clone
    clone._copy(self, copy.deepcopy)
    return clone
|
'Copies the object, preserving any special tagging from it
:return:
An Asn1Value object'
def copy(self):
    """
    Copies the object, preserving any special tagging from it.

    :return:
        An Asn1Value object
    """
    duplicate = copy.deepcopy(self)
    return duplicate
|
'Copies the object, applying a new tagging to it
:param tag_type:
A unicode string of "implicit" or "explicit"
:param tag:
A integer tag number
:return:
An Asn1Value object'
def retag(self, tag_type, tag):
    """
    Copies the object, applying a new tagging to it.

    :param tag_type:
        A unicode string of "implicit" or "explicit"

    :param tag:
        An integer tag number

    :return:
        An Asn1Value object
    """
    retagged = self.__class__(tag_type=tag_type, tag=tag)
    retagged._copy(self, copy.deepcopy)
    return retagged
|
'Copies the object, removing any special tagging from it
:return:
An Asn1Value object'
def untag(self):
    """
    Copies the object, removing any special tagging from it.

    :return:
        An Asn1Value object
    """
    plain = self.__class__()
    plain._copy(self, copy.deepcopy)
    return plain
|
'Copies the contents of another Asn1Value object to itself
:param object:
Another instance of the same class
:param copy_func:
An reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects'
def _copy(self, other, copy_func):
    """
    Copies the contents of another Asn1Value object to itself.

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference to copy.copy() or copy.deepcopy(), used when copying
        the cached native value
    """
    if (self.__class__ != other.__class__):
        raise TypeError(unwrap(u'\n Can not copy values from %s object to %s object\n ', type_name(other), type_name(self)))
    # The raw encoded contents may be shared; the native value is copied
    self.contents = other.contents
    self._native = copy_func(other._native)
|
'Show the binary data and parsed data in a tree structure'
def debug(self, nest_level=1):
    """
    Show the binary data and parsed data in a tree structure, recursing
    into parsed/chosen child values.
    """
    prefix = u' ' * nest_level
    has_parsed = hasattr(self, u'parsed')
    _basic_debug(prefix, self)
    if has_parsed:
        self.parsed.debug(nest_level + 2)
    elif hasattr(self, u'chosen'):
        self.chosen.debug(nest_level + 2)
    elif _PY2 and isinstance(self.native, byte_cls):
        print(u'%s Native: b%s' % (prefix, repr(self.native)))
    else:
        print(u'%s Native: %s' % (prefix, self.native))
|
'Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value'
def dump(self, force=False):
    """
    Encodes the value using DER.

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        A byte string of the DER-encoded value
    """
    contents = self.contents
    # Regenerate the header/trailer when not yet computed or forced
    if ((self._header is None) or force):
        # DER forbids the indefinite-length (constructed) form; switch to
        # the primitive method before encoding the header
        if (isinstance(self, Constructable) and self._indefinite):
            self.method = 0
        header = _dump_header(self.class_, self.method, self.tag, self.contents)
        trailer = ''
        if (self.tag_type == u'explicit'):
            # Explicit tagging wraps the encoded value in an outer
            # constructed container carrying the explicit class/tag
            container = Asn1Value()
            container.method = 1
            container.class_ = self.explicit_class
            container.tag = self.explicit_tag
            container.contents = ((header + self.contents) + trailer)
            # Force the container to compute its own header/trailer
            container.dump()
            header = (container._header + header)
            trailer += container._trailer
        self._header = header
        self._trailer = trailer
    return ((self._header + contents) + self._trailer)
|
'Generates _reverse_map from _map'
def _setup(self):
    """Generates _reverse_map from _map, skipping classes without a map."""
    cls = self.__class__
    if cls._map is None or cls._reverse_map is not None:
        return
    cls._reverse_map = {mapped: key for key, mapped in cls._map.items()}
|
'Converts the current object into an object of a different class. The
new class must use the ASN.1 encoding for the value.
:param other_class:
The class to instantiate the new object from
:return:
An instance of the type other_class'
def cast(self, other_class):
    """
    Converts the current object into an object of a different class that
    uses the same ASN.1 encoding for the value.

    :param other_class:
        The class to instantiate the new object from

    :return:
        An instance of the type other_class
    """
    if (other_class.tag != self.__class__.tag):
        raise TypeError(unwrap(u'\n Can not covert a value from %s object to %s object since they\n use different tags: %d versus %d\n ', type_name(other_class), type_name(self), other_class.tag, self.__class__.tag))
    converted = other_class()
    # Transfer tagging, raw header/contents/trailer - note the tag itself
    # is intentionally not copied, since other_class defines it
    for attr_name in ('tag_type', 'class_', 'explicit_class', 'explicit_tag', '_header', 'contents', '_trailer'):
        setattr(converted, attr_name, getattr(self, attr_name))
    if isinstance(self, Constructable):
        converted.method = self.method
        converted._indefinite = self._indefinite
    return converted
|
':return:
A concatenation of the native values of the contained chunks'
def _merge_chunks(self):
    """
    Combines the chunks of an indefinite-length (BER constructed) value.

    :return:
        A concatenation of the native values of the contained chunks
    """
    # Definite-length values are a single chunk already
    if (not self._indefinite):
        return self._as_chunk()
    pointer = self._chunks_offset
    contents_len = len(self.contents)
    output = None
    # Walk the contents, parsing each sub-value and merging recursively
    # (chunks may themselves be constructed)
    while (pointer < contents_len):
        (sub_value, pointer) = _parse_build(self.contents, pointer, spec=self.__class__)
        if (output is None):
            output = sub_value._merge_chunks()
        else:
            output += sub_value._merge_chunks()
    # No chunks found - fall back to treating contents as one chunk
    if (output is None):
        return self._as_chunk()
    return output
|
'A method to return a chunk of data that can be combined for
constructed method values
:return:
A native Python value that can be added together. Examples include
byte strings, unicode strings or tuples.'
def _as_chunk(self):
    """
    A method to return a chunk of data that can be combined for
    constructed method values.

    :return:
        A native Python value that can be added together (byte string,
        unicode string or tuple)
    """
    offset = self._chunks_offset
    return self.contents if offset == 0 else self.contents[offset:]
|
'Copies the contents of another Constructable object to itself
:param object:
Another instance of the same class
:param copy_func:
An reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects'
def _copy(self, other, copy_func):
    """
    Copies the contents of another Constructable object to itself.

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference to copy.copy() or copy.deepcopy()
    """
    # Copy the base Asn1Value state first, then the constructed-form flags
    super(Constructable, self)._copy(other, copy_func)
    self.method = other.method
    self._indefinite = other._indefinite
|
':param other:
The other Primitive to compare to
:return:
A boolean'
def __eq__(self, other):
    """
    Equality is purely class-based for this type - no value comparison.

    :param other:
        The other object to compare to

    :return:
        A boolean
    """
    return self.__class__ == other.__class__
|
'The native Python datatype representation of this value
:return:
None'
@property
def native(self):
    """
    The native Python datatype representation of this value.

    :return:
        None
    """
    # This type carries no value, so there is nothing to represent
    return None
|
'Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value'
def dump(self, force=False):
    """
    Encodes the value using DER.

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        The encoded value - always empty for this type
    """
    # NOTE(review): this returns the str '' - upstream asn1crypto returns
    # b'' here; confirm whether callers expect byte output
    return ''
|
'Sets the value of the object before passing to Asn1Value.__init__()
:param value:
An Asn1Value object that will be set as the parsed value'
def __init__(self, value=None, **kwargs):
    """
    Sets the value of the object before passing to Asn1Value.__init__().

    :param value:
        An Asn1Value object that will be set as the parsed value
    """
    Asn1Value.__init__(self, **kwargs)
    try:
        if (value is not None):
            if (not isinstance(value, Asn1Value)):
                raise TypeError(unwrap(u'\n value must be an instance of Ans1Value, not %s\n ', type_name(value)))
            # Cache (parsed value, spec, spec_params) so .parse() is a no-op
            self._parsed = (value, value.__class__, None)
            self.contents = value.dump()
    except (ValueError, TypeError) as e:
        # Add context about which type was being constructed, then re-raise
        args = e.args[1:]
        e.args = (((e.args[0] + (u'\n while constructing %s' % type_name(self))),) + args)
        raise e
|
'The native Python datatype representation of this value
:return:
The .native value from the parsed value object'
@property
def native(self):
    """
    The native Python datatype representation of this value.

    :return:
        The .native value from the parsed value object
    """
    if self._parsed is None:
        self.parse()
    parsed_value = self._parsed[0]
    return parsed_value.native
|
'Returns the parsed object from .parse()
:return:
The object returned by .parse()'
@property
def parsed(self):
    """
    Returns the parsed object from .parse(), parsing lazily on demand.

    :return:
        The object returned by .parse()
    """
    if self._parsed is None:
        self.parse()
    cached = self._parsed
    return cached[0]
|
'Parses the contents generically, or using a spec with optional params
:param spec:
A class derived from Asn1Value that defines what class_ and tag the
value should have, and the semantics of the encoded value. The
return value will be of this type. If omitted, the encoded value
will be decoded using the standard universal tag based on the
encoded tag number.
:param spec_params:
A dict of params to pass to the spec object
:return:
An object of the type spec, or if not present, a child of Asn1Value'
def parse(self, spec=None, spec_params=None):
    """
    Parses the contents generically, or using a spec with optional params.

    :param spec:
        A class derived from Asn1Value defining the class_/tag and
        semantics of the encoded value. If omitted, the encoded value is
        decoded using the standard universal tag based on the encoded tag
        number.

    :param spec_params:
        A dict of params to pass to the spec object

    :return:
        An object of the type spec, or if not present, a child of Asn1Value
    """
    # Re-parse when never parsed, or when a different spec/params was used
    if ((self._parsed is None) or (self._parsed[1:3] != (spec, spec_params))):
        try:
            passed_params = spec_params
            # Explicit tagging must be forwarded so the parser can unwrap it
            if (self.tag_type == u'explicit'):
                passed_params = ({} if (not spec_params) else spec_params.copy())
                passed_params[u'tag_type'] = self.tag_type
                passed_params[u'tag'] = self.explicit_tag
            contents = ((self._header + self.contents) + self._trailer)
            (parsed_value, _) = _parse_build(contents, spec=spec, spec_params=passed_params)
            self._parsed = (parsed_value, spec, spec_params)
            # The tagging info is now duplicated in the parsed value, so
            # clear it from this wrapper
            self.tag_type = None
            self.tag = None
            self._header = ''
            self.contents = contents
            self._trailer = ''
        except (ValueError, TypeError) as e:
            # Add context about which type failed to parse, then re-raise
            args = e.args[1:]
            e.args = (((e.args[0] + (u'\n while parsing %s' % type_name(self))),) + args)
            raise e
    return self._parsed[0]
|
'Copies the contents of another Any object to itself
:param object:
Another instance of the same class
:param copy_func:
An reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects'
def _copy(self, other, copy_func):
    """
    Copies the contents of another Any object to itself.

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference to copy.copy() or copy.deepcopy()
    """
    # Copy the base Asn1Value state, then the cached parse result
    super(Any, self)._copy(other, copy_func)
    self._parsed = copy_func(other._parsed)
|
'Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value'
def dump(self, force=False):
    """
    Encodes the value using DER, parsing first if necessary.

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        A byte string of the DER-encoded value
    """
    parsed = self._parsed
    if parsed is None:
        self.parse()
        parsed = self._parsed
    return parsed[0].dump(force=force)
|
'Loads a BER/DER-encoded byte string using the current class as the spec
:param encoded_data:
A byte string of BER or DER encoded data
:param strict:
A boolean indicating if trailing data should be forbidden - if so, a
ValueError will be raised when trailing data exists
:return:
An instance of the current class'
@classmethod
def load(cls, encoded_data, strict=False, **kwargs):
    """
    Loads a BER/DER-encoded byte string using the current class as the spec.

    :param encoded_data:
        A byte string of BER or DER encoded data

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :return:
        An instance of the current class
    """
    if not isinstance(encoded_data, byte_cls):
        raise TypeError(u'encoded_data must be a byte string, not %s' % type_name(encoded_data))
    value, _ = _parse_build(encoded_data, spec=cls, spec_params=kwargs, strict=strict)
    return value
|
'Generates _id_map from _alternatives to allow validating contents'
def _setup(self):
    """
    Generates _id_map and _name_map from _alternatives so that encoded
    values can be validated and looked up by name.
    """
    cls = self.__class__
    cls._id_map = {}
    cls._name_map = {}
    for index, info in enumerate(cls._alternatives):
        # Normalize each alternative to a (name, spec, params) triple
        if len(info) < 3:
            info = info + ({},)
            cls._alternatives[index] = info
        cls._id_map[_build_id_tuple(info[2], info[1])] = index
        cls._name_map[info[0]] = index
|
'Checks to ensure implicit tagging is not being used since it is
incompatible with Choice, then forwards on to Asn1Value.__init__()
:param name:
The name of the alternative to be set - used with value.
Alternatively this may be a dict with a single key being the name
and the value being the value, or a two-element tuple of the the
name and the value.
:param value:
The alternative value to set - used with name
:param tag_type:
The tag_type of the value - None, "implicit" or "explicit"
:raises:
ValueError - when tag_type is "implicit"'
def __init__(self, name=None, value=None, tag_type=None, **kwargs):
    """
    Checks to ensure implicit tagging is not being used (incompatible with
    Choice), then forwards on to Asn1Value.__init__().

    :param name:
        The name of the alternative to be set - used with value.
        Alternatively a single-key dict of {name: value}, or a two-element
        (name, value) tuple.

    :param value:
        The alternative value to set - used with name

    :param tag_type:
        The tag_type of the value - None, "implicit" or "explicit"

    :raises:
        ValueError - when tag_type is "implicit"
    """
    kwargs[u'tag_type'] = tag_type
    Asn1Value.__init__(self, **kwargs)
    try:
        # Implicit tagging strips the tag needed to detect the alternative
        if (tag_type == u'implicit'):
            raise ValueError(unwrap(u'\n The Choice type can not be implicitly tagged even if in an\n implicit module - due to its nature any tagging must be\n explicit\n '))
        if (name is not None):
            # Accept a single-key dict as a (name, value) pair
            if isinstance(name, dict):
                if (len(name) != 1):
                    raise ValueError(unwrap(u'\n When passing a dict as the "name" argument to %s,\n it must have a single key/value - however %d were\n present\n ', type_name(self), len(name)))
                (name, value) = list(name.items())[0]
            # Accept a two-element tuple as a (name, value) pair
            if isinstance(name, tuple):
                if (len(name) != 2):
                    raise ValueError(unwrap(u'\n When passing a tuple as the "name" argument to %s,\n it must have two elements, the name and value -\n however %d were present\n ', type_name(self), len(name)))
                value = name[1]
                name = name[0]
            if (name not in self._name_map):
                raise ValueError(unwrap(u'\n The name specified, "%s", is not a valid alternative\n for %s\n ', name, type_name(self)))
            self._choice = self._name_map[name]
            (_, spec, params) = self._alternatives[self._choice]
            # Coerce raw values into the spec type; re-tag existing values
            if (not isinstance(value, spec)):
                value = spec(value, **params)
            else:
                value = _fix_tagging(value, params)
            self._parsed = value
    except (ValueError, TypeError) as e:
        # Add context about which type was being constructed, then re-raise
        args = e.args[1:]
        e.args = (((e.args[0] + (u'\n while constructing %s' % type_name(self))),) + args)
        raise e
|
':return:
A unicode string of the field name of the chosen alternative'
@property
def name(self):
    """
    :return:
        A unicode string of the field name of the chosen alternative
    """
    cached = self._name
    if not cached:
        cached = self._alternatives[self._choice][0]
        self._name = cached
    return cached
|
'Parses the detected alternative
:return:
An Asn1Value object of the chosen alternative'
def parse(self):
    """
    Parses the detected alternative.

    :return:
        An Asn1Value object of the chosen alternative
    """
    if self._parsed is not None:
        return self._parsed
    try:
        (_, spec, params) = self._alternatives[self._choice]
        (self._parsed, _) = _parse_build(self.contents, spec=spec, spec_params=params)
    except (ValueError, TypeError) as e:
        # Add context about which type failed to parse, then re-raise
        args = e.args[1:]
        e.args = (((e.args[0] + (u'\n while parsing %s' % type_name(self))),) + args)
        raise e
    # Bug fix: the freshly-parsed value was previously never returned,
    # so the first call yielded None instead of the parsed alternative
    return self._parsed
|
':return:
An Asn1Value object of the chosen alternative'
@property
def chosen(self):
    """
    :return:
        An Asn1Value object of the chosen alternative
    """
    alternative = self.parse()
    return alternative
|
'The native Python datatype representation of this value
:return:
The .native value from the contained value object'
@property
def native(self):
    """
    The native Python datatype representation of this value.

    :return:
        The .native value from the contained value object
    """
    contained = self.chosen
    return contained.native
|
'Ensures that the class and tag specified exist as an alternative
:param class_:
The integer class_ from the encoded value header
:param tag:
The integer tag from the encoded value header
:param contents:
A byte string of the contents of the value - used when the object
is explicitly tagged
:raises:
ValueError - when value is not a valid alternative'
def validate(self, class_, tag, contents):
    """
    Ensures that the class and tag specified exist as an alternative.

    :param class_:
        The integer class_ from the encoded value header

    :param tag:
        The integer tag from the encoded value header

    :param contents:
        A byte string of the contents of the value - used when the object
        is explicitly tagged

    :raises:
        ValueError - when value is not a valid alternative
    """
    id_ = (class_, tag)
    if (self.tag_type == u'explicit'):
        # The outer wrapper must match the explicit tagging; the real
        # alternative id is taken from the inner value
        if ((self.explicit_class, self.explicit_tag) != id_):
            raise ValueError(unwrap(u'\n %s was explicitly tagged, but the value provided does not\n match the class and tag\n ', type_name(self)))
        ((class_, _, tag, _, _, _), _) = _parse(contents, len(contents))
        id_ = (class_, tag)
    if (id_ in self._id_map):
        self._choice = self._id_map[id_]
        return
    # An implicitly-tagged Choice can only be disambiguated when there is
    # exactly one alternative
    if ((self.class_ is not None) and (self.tag is not None)):
        if (len(self._alternatives) > 1):
            raise ValueError(unwrap(u'\n %s was implicitly tagged, but more than one alternative\n exists\n ', type_name(self)))
        if (id_ == (self.class_, self.tag)):
            self._choice = 0
            return
    # Nothing matched - report the offending class/tag and all valid ones
    asn1 = self._format_class_tag(class_, tag)
    asn1s = [self._format_class_tag(pair[0], pair[1]) for pair in self._id_map]
    raise ValueError(unwrap(u'\n Value %s did not match the class and tag of any of the alternatives\n in %s: %s\n ', asn1, type_name(self), u', '.join(asn1s)))
|
':return:
A unicode string of a human-friendly representation of the class and tag'
def _format_class_tag(self, class_, tag):
    """
    :return:
        A unicode string of a human-friendly representation of the class
        and tag
    """
    class_name = CLASS_NUM_TO_NAME_MAP[class_].upper()
    return u'[%s %s]' % (class_name, tag)
|
'Copies the contents of another Choice object to itself
:param object:
Another instance of the same class
:param copy_func:
An reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects'
def _copy(self, other, copy_func):
    """
    Copies the contents of another Choice object to itself.

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference to copy.copy() or copy.deepcopy()
    """
    # Copy the base Asn1Value state, then the choice-specific state
    super(Choice, self)._copy(other, copy_func)
    self._choice = other._choice
    self._name = other._name
    self._parsed = copy_func(other._parsed)
|
'Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value'
def dump(self, force=False):
    """
    Encodes the value using DER.

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        A byte string of the DER-encoded value
    """
    # A Choice encodes as its chosen alternative
    self.contents = self.chosen.dump(force=force)
    if self._header is None or force:
        # Only an explicit tag adds an outer header; otherwise the chosen
        # value's own encoding is complete
        if self.tag_type == u'explicit':
            self._header = _dump_header(self.explicit_class, 1, self.explicit_tag, self.contents)
        else:
            self._header = ''
    return self._header + self.contents
|
'Loads a BER/DER-encoded byte string using the current class as the spec
:param encoded_data:
A byte string of BER or DER encoded data
:param strict:
A boolean indicating if trailing data should be forbidden - if so, a
ValueError will be raised when trailing data exists
:return:
A Concat object'
@classmethod
def load(cls, encoded_data, strict=False):
    """
    Loads a BER/DER-encoded byte string using the current class as the spec.

    :param encoded_data:
        A byte string of BER or DER encoded data

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :return:
        A Concat object
    """
    instance = cls(contents=encoded_data, strict=strict)
    return instance
|
':param value:
A native Python datatype to initialize the object value with
:param contents:
A byte string of the encoded contents of the value
:param strict:
A boolean indicating if trailing data should be forbidden - if so, a
ValueError will be raised when trailing data exists in contents
:raises:
ValueError - when an error occurs with one of the children
TypeError - when an error occurs with one of the children'
def __init__(self, value=None, contents=None, strict=False):
    """
    Builds the child values, either by parsing encoded contents or by
    assigning from a sequence of values.

    :param value:
        A native Python datatype to initialize the object value with

    :param contents:
        A byte string of the encoded contents of the value

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists in contents

    :raises:
        ValueError - when an error occurs with one of the children
        TypeError - when an error occurs with one of the children
    """
    if (contents is not None):
        try:
            contents_len = len(contents)
            self._children = []
            offset = 0
            # Parse one child per spec, in order; specs beyond the end of
            # the contents become empty default instances
            for spec in self._child_specs:
                if (offset < contents_len):
                    (child_value, offset) = _parse_build(contents, pointer=offset, spec=spec)
                else:
                    child_value = spec()
                self._children.append(child_value)
            if (strict and (offset != contents_len)):
                extra_bytes = (contents_len - offset)
                raise ValueError((u'Extra data - %d bytes of trailing data were provided' % extra_bytes))
        except (ValueError, TypeError) as e:
            # Add context about which type was being constructed
            args = e.args[1:]
            e.args = (((e.args[0] + (u'\n while constructing %s' % type_name(self))),) + args)
            raise e
    if (value is not None):
        # Lazily size the child list, then assign via __setitem__ so each
        # value is validated
        if (self._children is None):
            self._children = ([None] * len(self._child_specs))
        for (index, data) in enumerate(value):
            self.__setitem__(index, data)
|
'Since str is different in Python 2 and 3, this calls the appropriate
method, __unicode__() or __bytes__()
:return:
A unicode string'
def __str__(self):
    """
    Since str differs in Python 2 and 3, dispatch to the appropriate
    method: __bytes__() on Python 2, __unicode__() on Python 3.
    """
    return self.__bytes__() if _PY2 else self.__unicode__()
|
'A byte string of the DER-encoded contents'
def __bytes__(self):
    """A byte string of the DER-encoded contents."""
    encoded = self.dump()
    return encoded
|
':return:
A unicode string'
def __unicode__(self):
    """
    :return:
        A unicode string - the repr of this object
    """
    rendered = repr(self)
    return rendered
|
':return:
A unicode string'
def __repr__(self):
    """
    :return:
        A unicode string showing the type, id and encoded bytes
    """
    encoded = repr(self.dump())
    return u'<%s %s %s>' % (type_name(self), id(self), encoded)
|
'Implements the copy.copy() interface
:return:
A new shallow copy of the Concat object'
def __copy__(self):
    """
    Implements the copy.copy() interface.

    :return:
        A new shallow copy of the Concat object
    """
    clone = self.__class__()
    clone._copy(self, copy.copy)
    return clone
|
'Implements the copy.deepcopy() interface
:param memo:
A dict for memoization
:return:
A new deep copy of the Concat object and all child objects'
def __deepcopy__(self, memo):
    """
    Implements the copy.deepcopy() interface.

    :param memo:
        A dict for memoization - this object is registered before copying
        so reference cycles are handled

    :return:
        A new deep copy of the Concat object and all child objects
    """
    clone = self.__class__()
    memo[id(self)] = clone
    clone._copy(self, copy.deepcopy)
    return clone
|
'Copies the object
:return:
A Concat object'
def copy(self):
    """
    Copies the object.

    :return:
        A Concat object
    """
    duplicate = copy.deepcopy(self)
    return duplicate
|
'Copies the contents of another Concat object to itself
:param object:
Another instance of the same class
:param copy_func:
An reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects'
def _copy(self, other, copy_func):
    """
    Copies the contents of another Concat object to itself.

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference to copy.copy() or copy.deepcopy(), used when copying
        the child list
    """
    if (self.__class__ != other.__class__):
        raise TypeError(unwrap(u'\n Can not copy values from %s object to %s object\n ', type_name(other), type_name(self)))
    copied_children = copy_func(other._children)
    self._children = copied_children
|
'Show the binary data and parsed data in a tree structure'
def debug(self, nest_level=1):
    """Show the binary data and parsed data in a tree structure."""
    prefix = u' ' * nest_level
    print(u'%s%s Object #%s' % (prefix, type_name(self), id(self)))
    print(u'%s Children:' % (prefix,))
    for child in self._children:
        child.debug(nest_level + 2)
|
'Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value'
def dump(self, force=False):
    """
    Encodes the value using DER - the concatenation of all children.

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        A string of the DER-encoded value
    """
    encoded_children = [child.dump(force=force) for child in self._children]
    return ''.join(encoded_children)
|
':return:
A byte string of the DER-encoded contents of the children'
@property
def contents(self):
    """
    :return:
        A string of the DER-encoded contents of the children
    """
    encoded = self.dump()
    return encoded
|
':return:
Integer'
def __len__(self):
    """
    :return:
        Integer - the number of child values
    """
    child_count = len(self._children)
    return child_count
|
'Allows accessing children by index
:param key:
An integer of the child index
:raises:
KeyError - when an index is invalid
:return:
The Asn1Value object of the child specified'
def __getitem__(self, key):
    """
    Allows accessing children by index.

    :param key:
        An integer of the child index - negative indexes do not wrap

    :raises:
        KeyError - when an index is invalid

    :return:
        The Asn1Value object of the child specified
    """
    if key > (len(self._child_specs) - 1) or key < 0:
        # Bug fix: message previously read "No child is definition for" -
        # corrected to match the wording used by __setitem__()
        raise KeyError(unwrap(u'\n No child is defined for position %d of %s\n ', key, type_name(self)))
    return self._children[key]
|
'Allows settings children by index
:param key:
An integer of the child index
:param value:
An Asn1Value object to set the child to
:raises:
KeyError - when an index is invalid
ValueError - when the value is not an instance of Asn1Value'
def __setitem__(self, key, value):
    """
    Allows setting children by index.

    :param key:
        An integer of the child index - negative indexes do not wrap

    :param value:
        An Asn1Value object to set the child to

    :raises:
        KeyError - when an index is invalid
        ValueError - when the value is not an instance of Asn1Value
    """
    if key < 0 or key > (len(self._child_specs) - 1):
        raise KeyError(unwrap(u'\n No child is defined for position %d of %s\n ', key, type_name(self)))
    if not isinstance(value, Asn1Value):
        raise ValueError(unwrap(u'\n Value for child %s of %s is not an instance of\n asn1crypto.core.Asn1Value\n ', key, type_name(self)))
    self._children[key] = value
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.