desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
def connect(self, raise_on_any_error=False):
    """Connect to hosts in hosts list. Returns status of connect as a dict.

    :param raise_on_any_error: Optional Raise an exception even if connecting to one
                               of the hosts fails.
    :type raise_on_any_error: ``boolean``

    :rtype: ``dict`` of ``str`` to ``dict``
    """
    results = {}

    for host in self._hosts:
        # Block until the green-thread pool has a free slot.
        while not self._pool.free():
            eventlet.sleep(self._scan_interval)
        self._pool.spawn(self._connect, host=host, results=results,
                         raise_on_any_error=raise_on_any_error)

    self._pool.waitall()

    if self._successful_connects < 1:
        LOG.error('Unable to connect to any of the hosts.',
                  extra={'connect_results': results})
        msg = ('Unable to connect to any one of the hosts: %s.\n\n connect_errors=%s' %
               (self._hosts, json.dumps(results, indent=2)))
        raise NoHostsConnectedToException(msg)

    return results
|
def run(self, cmd, timeout=None):
    """Run a command on remote hosts. Returns a dict containing results
    of execution from all hosts.

    :param cmd: Command to run. Must be shlex quoted.
    :type cmd: ``str``

    :param timeout: Optional Timeout for the command.
    :type timeout: ``int``

    :rtype: ``dict`` of ``str`` to ``dict``
    """
    # NOTE(review): the original docstring documented a ``cwd`` parameter which
    # the signature does not accept; the stale documentation has been removed.
    options = {'cmd': cmd, 'timeout': timeout}
    results = self._execute_in_pool(self._run_command, **options)
    return results
|
def put(self, local_path, remote_path, mode=None, mirror_local_mode=False):
    """Copy a file or folder to remote host.

    :param local_path: Path to local file or dir. Must be shlex quoted.
    :type local_path: ``str``

    :param remote_path: Path to remote file or dir. Must be shlex quoted.
    :type remote_path: ``str``

    :param mode: Optional mode to use for the file or dir.
    :type mode: ``int``

    :param mirror_local_mode: Optional Flag to mirror the mode
                              on local file/dir on remote host.
    :type mirror_local_mode: ``boolean``

    :rtype: ``dict`` of ``str`` to ``dict``
    """
    if not os.path.exists(local_path):
        # ValueError is more precise than a bare Exception for an invalid
        # argument and stays backward compatible: existing
        # ``except Exception`` handlers still catch it.
        raise ValueError('Local path %s does not exist.' % local_path)

    options = {'local_path': local_path, 'remote_path': remote_path,
               'mode': mode, 'mirror_local_mode': mirror_local_mode}
    return self._execute_in_pool(self._put_files, **options)
|
def mkdir(self, path):
    """Create a directory on remote hosts.

    :param path: Path to remote dir that must be created. Must be shlex quoted.
    :type path: ``str``

    :rtype path: ``dict`` of ``str`` to ``dict``
    """
    return self._execute_in_pool(self._mkdir, path=path)
|
def delete_file(self, path):
    """Delete a file on remote hosts.

    :param path: Path to remote file that must be deleted. Must be shlex quoted.
    :type path: ``str``

    :rtype path: ``dict`` of ``str`` to ``dict``
    """
    return self._execute_in_pool(self._delete_file, path=path)
|
def delete_dir(self, path, force=False, timeout=None):
    """Delete a dir on remote hosts.

    :param path: Path to remote dir that must be deleted. Must be shlex quoted.
    :type path: ``str``

    :rtype path: ``dict`` of ``str`` to ``dict``
    """
    # The original accepted ``timeout`` but never forwarded it, so callers'
    # timeouts were silently ignored.
    # NOTE(review): assumes self._delete_dir accepts a ``timeout`` keyword --
    # confirm against its signature.
    options = {'path': path, 'force': force, 'timeout': timeout}
    return self._execute_in_pool(self._delete_dir, **options)
|
def close(self):
    """Close all open SSH connections to hosts."""
    for host in self._hosts_client.keys():
        try:
            self._hosts_client[host].close()
        except Exception:
            # Narrowed from a bare ``except:`` which would also swallow
            # SystemExit / KeyboardInterrupt. Best-effort shutdown is kept:
            # one failing host must not prevent closing the rest.
            LOG.exception('Failed shutting down SSH connection to host: %s',
                          host)
|
@staticmethod
def _sanitize_command_string(cmd):
    """Remove any potentially sensitive information from the command string.

    For now we only mask the values of the sensitive environment variables.
    """
    if not cmd:
        return cmd

    return re.sub(r'ST2_ACTION_AUTH_TOKEN=(.+?)\s+?',
                  'ST2_ACTION_AUTH_TOKEN=%s ' % MASKED_ATTRIBUTE_VALUE,
                  cmd)
|
@staticmethod
def _generate_error_result(exc, message):
    """Build a uniform error result dict from a raised exception.

    :param exc: Raised exception.
    :type exc: Exception.

    :param message: Error message which will be prefixed to the exception exception message.
    :type message: ``str``
    """
    exc_message = getattr(exc, 'message', str(exc))
    error_message = '%s %s' % (message, exc_message)
    traceback_message = traceback.format_exc()

    # Timeouts get the conventional SIGKILL-style return code.
    timed_out = isinstance(exc, SSHCommandTimeoutError)
    return_code = -9 if timed_out else 255

    error_dict = {
        'failed': True,
        'succeeded': False,
        'timeout': timed_out,
        'return_code': return_code,
        'stdout': getattr(exc, 'stdout', None) or '',
        'stderr': getattr(exc, 'stderr', None) or '',
        'error': error_message,
        'traceback': traceback_message,
    }
    return error_dict
|
def apply_before(self, target):
    """Apply the policy before the target do work.

    :param target: The instance of the resource being affected by this policy.
    :type target: ``object``

    :rtype: ``object``
    """
    if not coordination.configured():
        # logging's ``warn`` is a deprecated alias of ``warning``.
        LOG.warning('Coordination service is not configured. '
                    'Policy enforcement is best effort.')
    return target
|
def apply_after(self, target):
    """Apply the policy after the target does work.

    :param target: The instance of the resource being affected by this policy.
    :type target: ``object``

    :rtype: ``object``
    """
    if not coordination.configured():
        # logging's ``warn`` is a deprecated alias of ``warning``.
        LOG.warning('Coordination service is not configured. '
                    'Policy enforcement is best effort.')
    return target
|
def _get_lock_name(self, values):
    """Return a safe string which can be used as a lock name.

    :param values: Dictionary with values to use in the lock name.
    :type values: ``dict``

    :rtype: ``str``
    """
    parts = ['%s=%s' % (key, value) for key, value in six.iteritems(values)]
    return ','.join(parts)
|
def _assign_dynamic_config_values(self, schema, config, parent_keys=None):
    """Assign dynamic config value for a particular config item if the item utilizes a Jinja
    expression for dynamic config values.

    Note: This method mutates config argument in place.

    :rtype: ``dict``
    """
    parent_keys = parent_keys or []

    for config_item_key, config_item_value in six.iteritems(config):
        schema_item = schema.get(config_item_key, {})

        if isinstance(config_item_value, dict):
            # Build the nested key path without mutating ``parent_keys`` in
            # place. The original ``parent_keys += [config_item_key]``
            # extended the shared list, leaking one sibling dictionary's key
            # into the path of every subsequent sibling.
            self._assign_dynamic_config_values(
                schema=schema_item.get('properties', {}),
                config=config[config_item_key],
                parent_keys=parent_keys + [config_item_key])
        else:
            is_jinja_expression = jinja_utils.is_jinja_expression(
                value=config_item_value)
            if is_jinja_expression:
                # Resolve the expression against the datastore.
                full_config_item_key = '.'.join(parent_keys + [config_item_key])
                value = self._get_datastore_value_for_expression(
                    key=full_config_item_key,
                    value=config_item_value,
                    config_schema_item=schema_item)
                config[config_item_key] = value
            else:
                # Static value - leave it as is.
                config[config_item_key] = config_item_value

    return config
|
def _assign_default_values(self, schema, config):
    """Assign default values for particular config if default values are provided in the config
    schema and a value is not specified in the config.

    Note: This method mutates config argument in place.

    :rtype: ``dict``
    """
    for schema_item_key, schema_item in six.iteritems(schema):
        default_value = schema_item.get('default', None)

        if 'default' in schema_item and schema_item_key not in config:
            config[schema_item_key] = default_value

        # Recurse into nested object schemas so their defaults apply too.
        is_object = schema_item.get('type', None) == 'object'
        if is_object and schema_item.get('properties', None):
            if not config.get(schema_item_key, None):
                config[schema_item_key] = {}
            self._assign_default_values(schema=schema_item['properties'],
                                        config=config[schema_item_key])

    return config
|
def _get_datastore_value_for_expression(self, key, value, config_schema_item=None):
    """Retrieve datastore value by first resolving the datastore expression and then retrieving
    the value from the datastore.

    :param key: Full path to the config item key (e.g. "token" / "auth.settings.token", etc.)
    """
    from st2common.services.config import deserialize_key_value

    config_schema_item = config_schema_item or {}
    secret = config_schema_item.get('secret', False)

    try:
        value = render_template_with_system_and_user_context(value=value,
                                                             user=self.user)
    except Exception as e:
        # Re-raise the same exception type with a friendlier message that
        # identifies the offending config key and pack.
        exc_class = type(e)
        original_msg = str(e)
        msg = ('Failed to render dynamic configuration value for key "%s" with value '
               '"%s" for pack "%s" config: %s ' % (key, value, self.pack_name,
                                                   original_msg))
        raise exc_class(msg)

    if value:
        value = deserialize_key_value(value=value, secret=secret)
    else:
        value = None

    return value
|
def get_config(self):
    """Retrieve config for a particular pack.

    :return: Config object if config is found, ``None`` otherwise.
    :rtype: :class:`.ContentPackConfig` or ``None``
    """
    config_path = self.get_global_config_path()
    return self.get_and_parse_config(config_path=config_path)
|
def get_action_config(self, action_file_path):
    """Retrieve config for a particular action inside the content pack.

    :param action_file_path: Full absolute path to the action file.
    :type action_file_path: ``str``

    :return: Config object if config is found, ``None`` otherwise.
    :rtype: :class:`.ContentPackConfig` or ``None``
    """
    # NOTE(review): ``action_file_path`` is currently unused - all actions
    # share the pack-global config.
    config_path = self.get_global_config_path()
    return self.get_and_parse_config(config_path=config_path)
|
def get_sensor_config(self, sensor_file_path):
    """Retrieve config for a particular sensor inside the content pack.

    :param sensor_file_path: Full absolute path to the sensor file.
    :type sensor_file_path: ``str``

    :return: Config object if config is found, ``None`` otherwise.
    :rtype: :class:`.ContentPackConfig` or ``None``
    """
    # NOTE(review): ``sensor_file_path`` is currently unused - all sensors
    # share the pack-global config.
    config_path = self.get_global_config_path()
    return self.get_and_parse_config(config_path=config_path)
|
@abc.abstractmethod
def pre_ack_process(self, message):
    """Called before acknowledging a message. Good place to track the message via a DB
    entry or some other applicable mechanism.

    The response of this method is passed into the ``process`` method. This way
    whatever is the processed version of the message can be moved forward. It is
    always possible to simply return ``message`` and have ``process`` handle the
    original message.
    """
    # Fixed docstring typos from the original ("acknowleding", "mechnism",
    # "reponse", "This was whatever"). Abstract - subclasses must implement.
    pass
|
def run(self, connection, wrapped_callback):
    """Run the wrapped_callback in a protective covering of retries and error handling.

    :param connection: Connection to messaging service
    :type connection: kombu.connection.Connection

    :param wrapped_callback: Callback that will be wrapped by all the fine handling in this
                             method. Expected signature of callback -
                             ``def func(connection, channel)``
    """
    should_stop = False
    channel = None

    while not should_stop:
        try:
            channel = connection.channel()
            wrapped_callback(connection=connection, channel=channel)
            should_stop = True
        except (connection.connection_errors + connection.channel_errors) as e:
            self._logger.exception('Connection or channel error identified: %s.'
                                   % str(e))
            should_stop, wait = self._retry_context.test_should_stop()
            # The channel died with the connection; do not try to close it
            # in ``finally``.
            channel = None

            if should_stop:
                raise

            eventlet.sleep(wait)
            # Recycle the broken connection before retrying.
            connection.close()
            connection.ensure_connection()
        except Exception as e:
            # Use str(e) instead of the original ``e.message`` - the
            # ``message`` attribute on exceptions is Python-2-only and raises
            # AttributeError on Python 3.
            self._logger.exception('Connections to rabbitmq cannot be '
                                   're-established: %s', str(e))
            raise
        finally:
            if should_stop and channel:
                try:
                    channel.close()
                except Exception:
                    self._logger.warning('Error closing channel.',
                                         exc_info=True)
|
'Ensure that recoverable errors are retried a set number of times before giving up.
:param connection: Connection to messaging service
:type connection: kombu.connection.Connection
:param obj: Object whose method is to be ensured. Typically, channel, producer etc. from
the kombu library.
:type obj: Must support mixin kombu.abstract.MaybeChannelBound'
| def ensured(self, connection, obj, to_ensure_func, **kwargs):
| ensuring_func = connection.ensure(obj, to_ensure_func, errback=self.errback, max_retries=3)
ensuring_func(**kwargs)
|
def dispatch(self, trigger, payload=None, trace_context=None):
    """Method which dispatches the trigger.

    :param trigger: Full name / reference of the trigger.
    :type trigger: ``str`` or ``object``

    :param payload: Trigger payload.
    :type payload: ``dict``

    :param trace_context: Trace context to associate with Trigger.
    :type trace_context: ``TraceContext``
    """
    assert isinstance(payload, (type(None), dict))
    assert isinstance(trace_context, (type(None), TraceContext))

    message = {
        'trigger': trigger,
        'payload': payload,
        TRACE_CONTEXT: trace_context,
    }

    self._logger.debug('Dispatching trigger (trigger=%s,payload=%s)',
                       trigger, message)
    self._publisher.publish_trigger(payload=message,
                                    routing_key='trigger_instance')
|
def dispatch(self, routing_key, payload, trace_context=None):
    """Method which dispatches the announcement.

    :param routing_key: Routing key of the announcement.
    :type routing_key: ``str``

    :param payload: Announcement payload.
    :type payload: ``dict``

    :param trace_context: Trace context to associate with Announcement.
    :type trace_context: ``TraceContext``
    """
    assert isinstance(payload, (type(None), dict))
    assert isinstance(trace_context, (type(None), dict, TraceContext))

    message = {
        'payload': payload,
        TRACE_CONTEXT: trace_context,
    }

    self._logger.debug('Dispatching announcement (routing_key=%s,payload=%s)',
                       routing_key, message)
    self._publisher.publish(payload=message, routing_key=routing_key)
|
def __init__(self, create_handler, update_handler, delete_handler, queue_suffix=None):
    """
    :param create_handler: Function which is called on SensorDB create event.
    :type create_handler: ``callable``

    :param update_handler: Function which is called on SensorDB update event.
    :type update_handler: ``callable``

    :param delete_handler: Function which is called on SensorDB delete event.
    :type delete_handler: ``callable``
    """
    self._create_handler = create_handler
    self._update_handler = update_handler
    self._delete_handler = delete_handler
    self._sensor_watcher_q = self._get_queue(queue_suffix)
    self.connection = None
    self._updates_thread = None

    # Map routing keys to their handlers for dispatch on message receipt.
    self._handlers = {
        publishers.CREATE_RK: create_handler,
        publishers.UPDATE_RK: update_handler,
        publishers.DELETE_RK: delete_handler,
    }
|
def __init__(self, create_handler, update_handler, delete_handler,
             trigger_types=None, queue_suffix=None, exclusive=False):
    """
    :param create_handler: Function which is called on TriggerDB create event.
    :type create_handler: ``callable``

    :param update_handler: Function which is called on TriggerDB update event.
    :type update_handler: ``callable``

    :param delete_handler: Function which is called on TriggerDB delete event.
    :type delete_handler: ``callable``

    :param trigger_types: If provided, handler function will only be called
                          if the trigger in the message payload is included
                          in this list.
    :type trigger_types: ``list``

    :param exclusive: If the Q is exclusive to a specific connection which is then
                      single connection created by TriggerWatcher. When the connection
                      breaks the Q is removed by the message broker.
    :type exclusive: ``bool``
    """
    self._create_handler = create_handler
    self._update_handler = update_handler
    self._delete_handler = delete_handler
    self._trigger_types = trigger_types
    self._trigger_watch_q = self._get_queue(queue_suffix, exclusive=exclusive)
    self.connection = None
    self._load_thread = None
    self._updates_thread = None

    # Map routing keys to their handlers for dispatch on message receipt.
    self._handlers = {
        publishers.CREATE_RK: create_handler,
        publishers.UPDATE_RK: update_handler,
        publishers.DELETE_RK: delete_handler,
    }
|
def list_values(self, local=True, prefix=None):
    """Retrieve all the datastores items.

    :param local: List values from a namespace local to this pack/class. Defaults to True.
    :type: local: ``bool``

    :param prefix: Optional key name prefix / startswith filter.
    :type prefix: ``str``

    :rtype: ``list`` of :class:`KeyValuePair`
    """
    client = self._get_api_client()
    self._logger.audit('Retrieving all the value from the datastore')
    full_prefix = self._get_full_key_prefix(local=local, prefix=prefix)
    return client.keys.get_all(prefix=full_prefix)
|
def get_value(self, name, local=True, scope=SYSTEM_SCOPE, decrypt=False):
    """Retrieve a value from the datastore for the provided key.

    By default, value is retrieved from the namespace local to the pack/class. If you want to
    retrieve a global value from a datastore, pass local=False to this method.

    :param name: Key name.
    :type name: ``str``

    :param local: Retrieve value from a namespace local to the pack/class. Defaults to True.
    :type: local: ``bool``

    :param scope: Scope under which item is saved. Defaults to system scope.
    :type: local: ``str``

    :param decrypt: Return the decrypted value. Defaults to False.
    :type: local: ``bool``

    :rtype: ``str`` or ``None``
    """
    if scope != SYSTEM_SCOPE:
        raise ValueError('Scope %s is unsupported.' % scope)

    full_name = self._get_full_key_name(name=name, local=local)
    client = self._get_api_client()
    self._logger.audit('Retrieving value from the datastore (name=%s)',
                       full_name)

    try:
        params = {'decrypt': str(decrypt).lower(), 'scope': scope}
        kvp = client.keys.get_by_id(id=full_name, params=params)
    except Exception as e:
        # Best-effort lookup: any API failure is logged and reported as
        # "not found" rather than propagated.
        self._logger.exception(
            'Exception retrieving value from datastore (name=%s): %s',
            full_name, e)
        return None

    return kvp.value if kvp else None
|
def set_value(self, name, value, ttl=None, local=True, scope=SYSTEM_SCOPE, encrypt=False):
    """Set a value for the provided key.

    By default, value is set in a namespace local to the pack/class. If you want to
    set a global value, pass local=False to this method.

    :param name: Key name.
    :type name: ``str``

    :param value: Key value.
    :type value: ``str``

    :param ttl: Optional TTL (in seconds).
    :type ttl: ``int``

    :param local: Set value in a namespace local to the pack/class. Defaults to True.
    :type: local: ``bool``

    :param scope: Scope under which to place the item. Defaults to system scope.
    :type: local: ``str``

    :param encrypt: Encrypt the value when saving. Defaults to False.
    :type: local: ``bool``

    :return: ``True`` on success, ``False`` otherwise.
    :rtype: ``bool``
    """
    if scope != SYSTEM_SCOPE:
        # Bug fix: the original passed ``scope`` as a second positional
        # argument to ValueError ("%s-style" logging call), so the message
        # was never formatted. Use %-formatting explicitly.
        raise ValueError('Scope %s is unsupported.' % scope)

    name = self._get_full_key_name(name=name, local=local)
    value = str(value)
    client = self._get_api_client()

    self._logger.audit('Setting value in the datastore (name=%s)', name)

    instance = KeyValuePair()
    instance.id = name
    instance.name = name
    instance.value = value
    instance.scope = scope

    if encrypt:
        instance.secret = True
    if ttl:
        instance.ttl = ttl

    client.keys.update(instance=instance)
    return True
|
def delete_value(self, name, local=True, scope=SYSTEM_SCOPE):
    """Delete the provided key.

    By default, value is deleted from a namespace local to the pack/class. If you want to
    delete a global value, pass local=False to this method.

    :param name: Name of the key to delete.
    :type name: ``str``

    :param local: Delete a value in a namespace local to the pack/class. Defaults to True.
    :type: local: ``bool``

    :param scope: Scope under which item is saved. Defaults to system scope.
    :type: local: ``str``

    :return: ``True`` on success, ``False`` otherwise.
    :rtype: ``bool``
    """
    if scope != SYSTEM_SCOPE:
        # Bug fix: the original passed ``scope`` as a second positional
        # argument to ValueError, so the message was never formatted.
        raise ValueError('Scope %s is unsupported.' % scope)

    name = self._get_full_key_name(name=name, local=local)
    client = self._get_api_client()

    instance = KeyValuePair()
    instance.id = name
    instance.name = name

    self._logger.audit('Deleting value from the datastore (name=%s)', name)

    try:
        params = {'scope': scope}
        client.keys.delete(instance=instance, params=params)
    except Exception as e:
        self._logger.exception(
            'Exception deleting value from datastore (name=%s): %s', name, e)
        return False

    return True
|
def _get_api_client(self):
    """Retrieve API client instance, creating a fresh one when the cached
    service token has expired.
    """
    token_expired = self._token_expire <= get_datetime_utc_now()

    if not self._client or token_expired:
        self._logger.audit('Creating new Client object.')
        ttl = cfg.CONF.auth.service_token_ttl
        self._token_expire = get_datetime_utc_now() + timedelta(seconds=ttl)
        temporary_token = create_token(username=self._api_username, ttl=ttl,
                                       service=True)
        api_url = get_full_public_api_url()
        self._client = Client(api_url=api_url, token=temporary_token.token)

    return self._client
|
def _get_full_key_name(self, name, local):
    """Retrieve a full key name.

    :rtype: ``str``
    """
    if not local:
        return name
    return self._get_key_name_with_prefix(name=name)
|
def _get_local_key_name_prefix(self):
    """Retrieve key prefix which is local to this pack/class."""
    return self._get_datastore_key_prefix() + self.DATASTORE_NAME_SEPARATOR
|
def _get_key_name_with_prefix(self, name):
    """Retrieve a full key name which is local to the current pack/class.

    :param name: Base datastore key name.
    :type name: ``str``

    :rtype: ``str``
    """
    return self._get_datastore_key_prefix() + self.DATASTORE_NAME_SEPARATOR + name
|
def query(self, execution_id, query_context, last_query_time=None):
    """This is the method individual queriers must implement.

    This method should return a tuple of (status, results).

    status should be one of LIVEACTION_STATUS_SUCCEEDED, LIVEACTION_STATUS_RUNNING,
    LIVEACTION_STATUS_FAILED defined in st2common.constants.action.
    """
    pass
|
def retrieve_binary(self):
    """Returns the path to the hg executable

    :return: The string path to the executable or False on error
    """
    name = 'hg'
    if os.name == 'nt':
        name += '.exe'

    binary = self.find_binary(name)
    if not binary:
        show_error(u'\n Unable to find %s.\n\n Please set the "hg_binary" setting by accessing the\n Preferences > Package Settings > Package Control > Settings\n \u2013 User menu entry.\n\n The Settings \u2013 Default entry can be used for reference,\n but changes to that will be overwritten upon next upgrade.\n ', name)
        return False

    return binary
|
def run(self):
    """Updates the repository with remote changes

    :return: False or error, or True on success
    """
    binary = self.retrieve_binary()
    if not binary:
        return False

    args = [binary] + list(self.update_command) + ['default']
    self.execute(args, self.working_copy, meaningful_output=True)
    return True
|
def incoming(self):
    """:return: bool if remote revisions are available"""
    cache_key = self.working_copy + '.incoming'

    # Serve from cache when a recent answer is available.
    cached = get_cache(cache_key)
    if cached is not None:
        return cached

    binary = self.retrieve_binary()
    if not binary:
        return False

    output = self.execute([binary, 'in', '-q', 'default'],
                          self.working_copy, meaningful_output=True)
    if output is False:
        return False

    has_incoming = len(output) > 0
    set_cache(cache_key, has_incoming, self.cache_length)
    return has_incoming
|
def latest_commit(self):
    """
    :return:
        The latest commit hash
    """
    binary = self.retrieve_binary()
    if not binary:
        return False

    output = self.execute([binary, 'id', '-i'], self.working_copy)
    if output is False:
        return False

    return output.strip()
|
def retrieve_binary(self):
    """Returns the path to the git executable

    :return: The string path to the executable or False on error
    """
    name = 'git'
    if os.name == 'nt':
        name += '.exe'

    binary = self.find_binary(name)
    if not binary:
        show_error(u'\n Unable to find %s.\n\n Please set the "git_binary" setting by accessing the\n Preferences > Package Settings > Package Control > Settings\n \u2013 User menu entry.\n\n The Settings \u2013 Default entry can be used for reference,\n but changes to that will be overwritten upon next upgrade.\n ', name)
        return False

    if os.name == 'nt':
        # On Windows, prefer TortoisePlink for SSH when Pageant is running so
        # key auth works without prompting.
        tortoise_plink = self.find_binary('TortoisePlink.exe')
        if tortoise_plink and u'pageant.exe' in list_process_names():
            os.environ.setdefault('GIT_SSH', tortoise_plink)

    return binary
|
def run(self):
    """Updates the repository with remote changes

    :return: False or error, or True on success
    """
    binary = self.retrieve_binary()
    if not binary:
        return False

    info = self.get_working_copy_info()
    if info is False:
        return False

    args = [binary] + list(self.update_command)
    args += [info['remote'], info['remote_branch']]
    self.execute(args, self.working_copy, meaningful_output=True)
    return True
|
def incoming(self):
    """:return: bool if remote revisions are available"""
    cache_key = self.working_copy + '.incoming'

    cached = get_cache(cache_key)
    if cached is not None:
        return cached

    binary = self.retrieve_binary()
    if not binary:
        return False

    info = self.get_working_copy_info()
    if info is False:
        return False

    res = self.execute([binary, 'fetch', info['remote']], self.working_copy)
    if res is False:
        return False

    args = [binary, 'log']
    args.append('..%s/%s' % (info['remote'], info['remote_branch']))
    output = self.execute(args, self.working_copy, meaningful_output=True)
    # Bug fix: the hg counterpart guards against execute() returning False,
    # but this version did not - ``len(False)`` raises TypeError when the
    # log command fails.
    if output is False:
        return False

    incoming = len(output) > 0
    set_cache(cache_key, incoming, self.cache_length)
    return incoming
|
def latest_commit(self):
    """
    :return:
        The latest commit hash
    """
    binary = self.retrieve_binary()
    if not binary:
        return False

    output = self.execute([binary, 'rev-parse', '--short', 'HEAD'],
                          self.working_copy)
    if output is False:
        return False

    return output.strip()
|
def on_done(self, picked):
    """Quick panel user selection handler - enables the selected package

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """
    if picked == -1:
        return

    package = self.disabled_packages[picked]
    self.reenable_package(package, 'enable')

    sublime.status_message(text.format('\n Package %s successfully removed from list of disabled packages -\n restarting Sublime Text may be required\n ', package))
|
def on_done(self, input):
    """Input panel handler - adds the provided URL as a channel

    :param input:
        A string of the URL to the new channel
    """
    input = input.strip()

    if re.match('https?://', input, re.I) is None:
        show_error(u'\n Unable to add the channel "%s" since it does not appear to be\n served via HTTP (http:// or https://).\n ', input)
        return

    settings = sublime.load_settings(pc_settings_filename())
    channels = settings.get('channels', []) or []
    channels.append(input)
    settings.set('channels', channels)
    sublime.save_settings(pc_settings_filename())
    sublime.status_message('Channel %s successfully added' % input)
|
def on_done(self, picked):
    """Quick panel user selection handler - disables the selected package

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """
    if picked == -1:
        return

    package = self.package_list[picked]
    self.disable_packages(package, 'disable')

    sublime.status_message(text.format('\n Package %s successfully added to list of disabled packages -\n restarting Sublime Text may be required\n ', package))
|
def __init__(self, window, filter_function=None):
    """
    :param window:
        An instance of :class:`sublime.Window` that represents the Sublime
        Text window to show the list of installed packages in.

    :param filter_function:
        A callable to filter packages for display. This function gets
        called for each package in the list with a three-element list
        as returned by :meth:`ExistingPackagesCommand.make_package_list`:
          0 - package name
          1 - package description
          2 - [action] installed version; package url
        If the function returns a true value, the package is listed,
        otherwise it is discarded. If `None`, no filtering is performed.
    """
    self.window = window
    self.filter_function = filter_function
    self.manager = PackageManager()
    threading.Thread.__init__(self)
|
def on_done(self, picked):
    """Quick panel user selection handler - opens the homepage for any
    selected package in the user's browser

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """
    if picked == -1:
        return

    package_name = self.package_list[picked][0]

    def open_dir():
        # Resolve the on-disk location: unpacked folder first, falling back
        # to the .sublime-package archive in the installed packages dir.
        package_dir = self.manager.get_package_dir(package_name)
        package_file = None
        if not os.path.exists(package_dir):
            package_dir = self.manager.settings['installed_packages_path']
            package_file = package_name + '.sublime-package'
            if not os.path.exists(os.path.join(package_dir, package_file)):
                package_file = None

        open_dir_file = {'dir': package_dir}
        if package_file is not None:
            open_dir_file['file'] = package_file
        self.window.run_command('open_dir', open_dir_file)

    sublime.set_timeout(open_dir, 10)
|
def on_done(self, input):
    """Input panel handler - adds the provided URL as a repository

    :param input:
        A string of the URL to the new repository
    """
    input = input.strip()

    if re.match('https?://', input, re.I) is None:
        show_error(u'\n Unable to add the repository "%s" since it does not appear to\n be served via HTTP (http:// or https://).\n ', input)
        return

    settings = sublime.load_settings(pc_settings_filename())
    repositories = settings.get('repositories', []) or []
    repositories.append(input)
    settings.set('repositories', repositories)
    sublime.save_settings(pc_settings_filename())
    sublime.status_message('Repository %s successfully added' % input)
|
def __init__(self, window):
    """
    :param window:
        An instance of :class:`sublime.Window` that represents the Sublime
        Text window to show the list of installed packages in.
    """
    self.window = window
    self.manager = PackageManager()
|
def on_done(self, picked):
    """Quick panel user selection handler - deletes the selected package

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """
    if picked == -1:
        return

    package = self.package_list[picked][0]
    self.disable_packages(package, 'remove')

    # Removal happens on a worker thread with a progress indicator.
    thread = RemovePackageThread(self.manager, package)
    thread.start()
    ThreadProgress(thread,
                   'Removing package %s' % package,
                   'Package %s successfully removed' % package)
|
def __init__(self, window, package_renamer):
    """
    :param window:
        An instance of :class:`sublime.Window` that represents the Sublime
        Text window to show the list of upgradable packages in.

    :param package_renamer:
        An instance of :class:`PackageRenamer`
    """
    self.window = window
    self.package_renamer = package_renamer
    self.completion_type = 'upgraded'
    threading.Thread.__init__(self)
    PackageInstaller.__init__(self)
|
def on_done(self, picked):
    """Quick panel user selection handler - disables a package, upgrades it,
    then re-enables the package

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """
    if picked == -1:
        return

    name = self.package_list[picked][0]

    # Only schedule a re-enable if the package was actually disabled.
    if name in self.disable_packages(name, 'upgrade'):
        def on_complete():
            self.reenable_package(name)
    else:
        on_complete = None

    thread = PackageInstallerThread(self.manager, name, on_complete, pause=True)
    thread.start()
    ThreadProgress(thread,
                   'Upgrading package %s' % name,
                   'Package %s successfully %s' % (name, self.completion_type))
|
def make_package_list(self, action=''):
    """
    Returns a list of installed packages suitable for displaying in the
    quick panel.

    :param action:
        An action to display at the beginning of the third element of the
        list returned for each package

    :return:
        A list of lists, each containing three strings:
          0 - package name
          1 - package description
          2 - [action] installed version; package url
    """

    if action:
        action += ' '

    entries = []
    for name in sorted(self.manager.list_packages(), key=lambda s: s.lower()):
        metadata = self.manager.get_metadata(name)
        package_dir = os.path.join(sublime.packages_path(), name)

        description = metadata.get('description') or 'No description provided'

        version = metadata.get('version')
        if version:
            installed_version = 'v' + version
        elif os.path.exists(os.path.join(package_dir, '.git')):
            installed_version = 'git repository'
        elif os.path.exists(os.path.join(package_dir, '.hg')):
            installed_version = 'hg repository'
        else:
            installed_version = 'unknown version'

        url = metadata.get('url')
        url = '; ' + re.sub('^https?://', '', url) if url else ''

        entries.append([name, description, action + installed_version + url])

    return entries
|
def on_done(self, input):
    """
    Input panel handler - starts installing the package names the user
    entered

    :param input:
        A string of one or more package names
    """

    value = input.strip()
    if not value:
        show_error(u'\n No package names were entered\n ')
        return

    self.start(self.split(value))
|
def __init__(self, packages):
    """
    :param packages:
        A list of package names to install or upgrade
    """

    self.manager = PackageManager()
    self.packages = packages
    self.installed = self.manager.list_packages()
    self.disabled = []
    for package_name in packages:
        # Packages already present get upgraded instead of installed
        operation_type = ('install' if (package_name not in self.installed) else 'upgrade')
        self.disabled.extend(self.disable_packages(package_name, operation_type))
    threading.Thread.__init__(self)
|
def on_done(self, index):
    """
    Quick panel handler - removes the repository from settings

    :param index:
        The numeric index of the repository in the list of repositories
    """

    if index == -1:
        return

    repository = self.repositories[index]
    try:
        self.repositories.remove(repository)
        self.settings.set('repositories', self.repositories)
        sublime.save_settings(pc_settings_filename())
        sublime.status_message('Repository %s successfully removed' % repository)
    except ValueError:
        # Repository was already gone from the list - nothing to do
        pass
|
def __init__(self, window):
    """
    :param window:
        An instance of :class:`sublime.Window` that represents the Sublime
        Text window to show the available package list in.
    """

    self.window = window
    self.completion_type = 'installed'
    # Initialize both bases explicitly since this class is a Thread and a
    # PackageInstaller
    threading.Thread.__init__(self)
    PackageInstaller.__init__(self)
|
def on_done(self, index):
    """
    Quick panel handler - removes the channel from settings

    :param index:
        The numeric index of the channel in the list of channels
    """

    if index == -1:
        return

    channel = self.channels[index]
    try:
        self.channels.remove(channel)
        self.settings.set('channels', self.channels)
        sublime.save_settings(pc_settings_filename())
        sublime.status_message('Channel %s successfully removed' % channel)
    except ValueError:
        # Channel was already gone from the list - nothing to do
        pass
|
def on_done(self, picked):
    """
    Quick panel user selection handler - adds a loader for the selected
    dependency

    :param picked:
        An integer of the 0-based package name index from the presented
        list. -1 means the user cancelled.
    """

    if picked == -1:
        return

    dependency = self.dependency_list[picked]
    priority, code = self.manager.get_dependency_priority_code(dependency)
    loader.add(priority, dependency, code)

    sublime.status_message(text.format('\n Dependency %s successfully added to dependency loader -\n restarting Sublime Text may be required\n ', dependency))
|
def supported_encodings(self):
    """
    Determines the supported encodings we can decode

    :return:
        A comma-separated string of valid encodings
    """

    # bzip2 support is optional - the bz2 module may have failed to import
    return 'bzip2,gzip,deflate' if bz2 else 'gzip,deflate'
|
def decode_response(self, encoding, response):
    """
    Decodes the raw response from the web server based on the
    Content-Encoding HTTP header

    :param encoding:
        The value of the Content-Encoding HTTP header

    :param response:
        The raw response from the server

    :return:
        The decoded response
    """

    if encoding == 'gzip':
        return gzip.GzipFile(fileobj=StringIO(response)).read()

    if encoding == 'deflate':
        # Negative wbits tells zlib the stream is raw deflate data with
        # no zlib header or trailing checksum
        decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
        return decompressor.decompress(response) + decompressor.flush()

    if encoding == 'bzip2':
        if not bz2:
            raise DownloaderException(u'Received bzip2 file contents, but was unable to import the bz2 module')
        return bz2.decompress(response)

    # Unknown or missing encoding - return the bytes untouched
    return response
|
def close(self):
    """
    Closes any persistent/open connections
    """

    if self.opener:
        handler = self.get_handler()
        if handler:
            handler.close()
        self.opener = None
|
def download(self, url, error_message, timeout, tries, prefer_cached=False):
    """
    Downloads a URL and returns the contents

    Uses the proxy settings from the Package Control.sublime-settings file,
    however there seem to be a decent number of proxies that this code
    does not work with. Patches welcome!

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :param prefer_cached:
        If a cached version should be returned instead of trying a new request

    :raises:
        RateLimitException: when a rate limit is hit
        DownloaderException: when any other download error occurs

    :return:
        The string contents of the URL
    """

    if prefer_cached:
        cached = self.retrieve_cached(url)
        if cached:
            return cached

    self.setup_opener(url, timeout)

    debug = self.settings.get('debug')
    # Remember the original try count for the fallback error message
    tried = tries
    error_string = None
    while (tries > 0):
        tries -= 1
        try:
            request_headers = {'Accept-Encoding': self.supported_encodings()}
            user_agent = self.settings.get('user_agent')
            if user_agent:
                request_headers['User-Agent'] = user_agent
            # Adds If-Modified-Since/If-None-Match when a cached copy exists
            request_headers = self.add_conditional_headers(url, request_headers)
            request = Request(url, headers=request_headers)
            http_file = self.opener.open(request, timeout=timeout)
            self.handle_rate_limit(http_file.headers, url)
            result = http_file.read()
            # Read and close the response before decoding so the connection
            # can be re-used
            http_file.close()
            encoding = http_file.headers.get('content-encoding')
            result = self.decode_response(encoding, result)
            return self.cache_result('get', url, http_file.getcode(), http_file.headers, result)
        except HTTPException as e:
            # A stale keep-alive connection can produce BadStatusLine -
            # rebuild the opener and retry without consuming an attempt
            if isinstance(e, BadStatusLine):
                handler = self.get_handler()
                if (handler and (handler.use_count > 1)):
                    self.close()
                    self.setup_opener(url, timeout)
                    tries += 1
                    continue
            exception_type = e.__class__.__name__
            error_string = text.format(u'\n %s HTTP exception %s (%s) downloading %s.\n ', (error_message, exception_type, unicode_from_os(e), url))
        except HTTPError as e:
            # Drain and close the error response so the connection can be
            # re-used
            e.read()
            e.close()
            self.handle_rate_limit(e.headers, url)
            # 304 means our cached copy (sent via conditional headers) is
            # still current
            if (unicode_from_os(e.code) == '304'):
                return self.cache_result('get', url, int(e.code), e.headers, '')
            # 503 is treated as rate limiting - retry if attempts remain
            if ((unicode_from_os(e.code) == '503') and (tries != 0)):
                if (tries and debug):
                    console_write(u'\n Downloading %s was rate limited, trying again\n ', url)
                continue
            error_string = text.format(u'\n %s HTTP error %s downloading %s.\n ', (error_message, unicode_from_os(e.code), url))
        except URLError as e:
            # Timeouts are retried; other URL errors end this attempt
            if ((unicode_from_os(e.reason) == 'The read operation timed out') or (unicode_from_os(e.reason) == 'timed out')):
                if (tries and debug):
                    console_write(u'\n Downloading %s timed out, trying again\n ', url)
                continue
            error_string = text.format(u'\n %s URL error %s downloading %s.\n ', (error_message, unicode_from_os(e.reason), url))
        except ConnectionError:
            # The persistent connection was dropped - rebuild it and retry
            if debug:
                console_write(u'\n Connection went away while trying to download %s, trying again\n ', url)
            self.opener = None
            self.setup_opener(url, timeout)
            continue
        break
    # All attempts ended in a retried (continue) path - synthesize a message
    if (error_string is None):
        plural = (u's' if (tried > 1) else u'')
        error_string = (u'Unable to download %s after %d attempt%s' % (url, tried, plural))
    raise DownloaderException(error_string)
|
def get_handler(self):
    """
    Get the HTTPHandler object for the current connection

    :return:
        The matching handler from the opener, or None when there is no
        opener or no matching handler
    """

    if not self.opener:
        return None

    for handler in self.opener.handlers:
        if isinstance(handler, (ValidatingHTTPSHandler, DebuggableHTTPHandler)):
            return handler
|
def setup_opener(self, url, timeout):
    """
    Sets up a urllib OpenerDirector to be used for requests. There is a
    fair amount of custom urllib code in Package Control, and part of it
    is to handle proxies and keep-alives. Creating an opener the way
    below is because the handlers have been customized to send the
    "Connection: Keep-Alive" header and hold onto connections so they
    can be re-used.

    :param url:
        The URL to download

    :param timeout:
        The int number of seconds to set the timeout to
    """

    # Only build the opener once - it is re-used across downloads
    if (not self.opener):
        http_proxy = self.settings.get('http_proxy')
        https_proxy = self.settings.get('https_proxy')
        if (http_proxy or https_proxy):
            proxies = {}
            if http_proxy:
                proxies['http'] = http_proxy
            if https_proxy:
                proxies['https'] = https_proxy
            proxy_handler = ProxyHandler(proxies)
        else:
            # Without explicit settings, ProxyHandler falls back to
            # environment-provided proxies
            proxy_handler = ProxyHandler()
        password_manager = HTTPPasswordMgrWithDefaultRealm()
        proxy_username = self.settings.get('proxy_username')
        proxy_password = self.settings.get('proxy_password')
        if (proxy_username and proxy_password):
            if http_proxy:
                password_manager.add_password(None, http_proxy, proxy_username, proxy_password)
            if https_proxy:
                password_manager.add_password(None, https_proxy, proxy_username, proxy_password)
        handlers = [proxy_handler]
        basic_auth_handler = ProxyBasicAuthHandler(password_manager)
        digest_auth_handler = ProxyDigestAuthHandler(password_manager)
        handlers.extend([digest_auth_handler, basic_auth_handler])
        debug = self.settings.get('debug')
        if debug:
            console_write(u'\n Urllib Debug Proxy\n http_proxy: %s\n https_proxy: %s\n proxy_username: %s\n proxy_password: %s\n ', (http_proxy, https_proxy, proxy_username, proxy_password))
        secure_url_match = re.match('^https://([^/]+)', url)
        if (secure_url_match is not None):
            # HTTPS requests go through the validating handler with the
            # bundled CA certs
            bundle_path = get_ca_bundle_path(self.settings)
            bundle_path = bundle_path.encode(sys.getfilesystemencoding())
            handlers.append(ValidatingHTTPSHandler(ca_certs=bundle_path, debug=debug, passwd=password_manager, user_agent=self.settings.get('user_agent')))
        else:
            handlers.append(DebuggableHTTPHandler(debug=debug, passwd=password_manager))
        self.opener = build_opener(*handlers)
|
def supports_ssl(self):
    """
    Indicates if the object can handle HTTPS requests

    :return:
        If the object supports HTTPS requests
    """

    # Both the ssl module and an HTTPS-capable urllib are required
    if 'ssl' not in sys.modules:
        return False
    return hasattr(urllib_compat, 'HTTPSHandler')
|
def find_binary(self, name):
    """
    Finds the given executable name in the system PATH

    :param name:
        The exact name of the executable to find

    :return:
        The absolute path to the executable

    :raises:
        BinaryNotFoundError when the executable can not be found
    """

    search_dirs = os.environ['PATH'].split(os.pathsep)
    if os.name != 'nt':
        # /usr/local/bin may be missing from PATH when launched via a GUI
        search_dirs.append('/usr/local/bin')

    for search_dir in search_dirs:
        candidate = os.path.join(search_dir, name)
        if os.path.exists(candidate):
            return candidate

    raise BinaryNotFoundError('The binary %s could not be located' % name)
|
def execute(self, args):
    """
    Runs the executable and args and returns the result

    :param args:
        A list of the executable path and all arguments to be passed to it

    :return:
        The text output of the executable

    :raises:
        NonCleanExitError when the executable exits with an error
    """

    if self.settings.get('debug'):
        console_write(u'\n Trying to execute command %s\n ', create_cmd(args))

    proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # communicate() reads stdout and stderr concurrently. The previous
    # sequential proc.stdout.read() / proc.stderr.read() could deadlock
    # when the child filled the stderr pipe buffer before stdout was
    # fully drained.
    (output, self.stderr) = proc.communicate()
    returncode = proc.returncode
    if (returncode != 0):
        error = NonCleanExitError(returncode)
        error.stderr = self.stderr
        error.stdout = output
        raise error
    return output
|
def close(self):
    """
    No-op for compatibility with UrllibDownloader and WinINetDownloader
    """

    pass
|
def download(self, url, error_message, timeout, tries, prefer_cached=False):
    """
    Downloads a URL and returns the contents

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :param prefer_cached:
        If a cached version should be returned instead of trying a new request

    :raises:
        RateLimitException: when a rate limit is hit
        DownloaderException: when any other download error occurs

    :return:
        The string contents of the URL
    """

    if prefer_cached:
        cached = self.retrieve_cached(url)
        if cached:
            return cached

    # wget writes its log/debug output to this temp file via -o; the body
    # itself goes to stdout (-O -)
    self.tmp_file = tempfile.NamedTemporaryFile().name
    command = [self.wget, ('--connect-timeout=' + str_cls(int(timeout))), '-o', self.tmp_file, '-O', '-', '--secure-protocol=TLSv1']

    user_agent = self.settings.get('user_agent')
    if user_agent:
        command.extend(['-U', user_agent])

    request_headers = {'Accept-Encoding': self.supported_encodings()}
    # Adds If-Modified-Since/If-None-Match when a cached copy exists
    request_headers = self.add_conditional_headers(url, request_headers)
    for (name, value) in request_headers.items():
        command.extend(['--header', ('%s: %s' % (name, value))])

    secure_url_match = re.match('^https://([^/]+)', url)
    if (secure_url_match is not None):
        bundle_path = get_ca_bundle_path(self.settings)
        command.append((u'--ca-certificate=' + bundle_path))

    # -d emits full debug output; -S at least prints response headers
    if self.debug:
        command.append('-d')
    else:
        command.append('-S')

    http_proxy = self.settings.get('http_proxy')
    https_proxy = self.settings.get('https_proxy')
    proxy_username = self.settings.get('proxy_username')
    proxy_password = self.settings.get('proxy_password')

    if proxy_username:
        command.append((u'--proxy-user=%s' % proxy_username))
    if proxy_password:
        command.append((u'--proxy-password=%s' % proxy_password))

    if self.debug:
        console_write(u'\n Wget Debug Proxy\n http_proxy: %s\n https_proxy: %s\n proxy_username: %s\n proxy_password: %s\n ', (http_proxy, https_proxy, proxy_username, proxy_password))

    command.append(url)

    # Proxies are passed to wget via the environment
    if http_proxy:
        os.putenv('http_proxy', http_proxy)
    if https_proxy:
        os.putenv('https_proxy', https_proxy)

    error_string = None
    while (tries > 0):
        tries -= 1
        try:
            result = self.execute(command)

            (general, headers) = self.parse_output(True)
            encoding = headers.get('content-encoding')
            result = self.decode_response(encoding, result)

            result = self.cache_result('get', url, general['status'], headers, result)

            return result

        except NonCleanExitError as e:
            try:
                (general, headers) = self.parse_output(False)
                self.handle_rate_limit(headers, url)

                # 304 means our cached copy is still current
                if (general['status'] == 304):
                    return self.cache_result('get', url, general['status'], headers, None)

                # 503 is treated as rate limiting - retry if attempts remain
                if ((general['status'] == 503) and (tries != 0)):
                    if (tries and self.debug):
                        console_write(u'\n Downloading %s was rate limited, trying again\n ', url)
                    continue

                download_error = ('HTTP error %s' % general['status'])

            except NonHttpError as e:
                download_error = unicode_from_os(e)

                # Timeouts are retried without recording an error
                if (download_error.find('timed out') != (-1)):
                    if (tries and self.debug):
                        console_write(u'\n Downloading %s timed out, trying again\n ', url)
                    continue

            error_string = (u'%s %s downloading %s.' % (error_message, download_error, url))
        break

    # NOTE(review): error_string stays None if every attempt ended in a
    # continue path - verify callers tolerate DownloaderException(None)
    raise DownloaderException(error_string)
|
def supports_ssl(self):
    """
    Indicates if the object can handle HTTPS requests

    :return:
        If the object supports HTTPS requests
    """

    # SSL handling is delegated to the external binary, so this downloader
    # always reports HTTPS support
    return True
|
def parse_output(self, clean_run):
    """
    Parses the wget output file, prints debug information and returns headers

    :param clean_run:
        If wget executed with a successful exit code

    :raises:
        NonHttpError - when clean_run is false and an error is detected

    :return:
        A tuple of (general, headers) where general is a dict with the keys:
          `version` - HTTP version number (string)
          `status` - HTTP status code (integer)
          `message` - HTTP status message (string)
        And headers is a dict with the keys being lower-case version of the
        HTTP header names.
    """

    with open_compat(self.tmp_file, 'r') as f:
        output = read_compat(f).splitlines()
    self.clean_tmp_file()

    error = None
    header_lines = []
    if self.debug:
        # Debug (-d) output is divided into request/response sections by
        # marker lines
        section = 'General'
        last_section = None
        for line in output:
            if (section == 'General'):
                if self.skippable_line(line):
                    continue

            # Skip blank lines
            if (line.strip() == ''):
                continue

            # Error lines reported by wget itself
            if (line[0:5] == 'wget:'):
                error = line[5:].strip()
            if (line[0:7] == 'failed:'):
                error = line[7:].strip()

            if (line == '---request begin---'):
                section = 'Write'
                continue
            elif (line == '---request end---'):
                section = 'General'
                continue
            elif (line == '---response begin---'):
                section = 'Read'
                continue
            elif (line == '---response end---'):
                section = 'General'
                continue

            if (section != last_section):
                console_write(u'Wget HTTP Debug %s', section)

            if (section == 'Read'):
                header_lines.append(line)

            console_write(u' %s', line, prefix=False)
            last_section = section

    else:
        for line in output:
            if self.skippable_line(line):
                continue

            # Connection failures embed the reason after "failed:"
            if re.match('(Resolving |Connecting to )', line):
                failed_match = re.search(' failed: (.*)$', line)
                if failed_match:
                    error = failed_match.group(1).strip()
            if (line[0:5] == 'wget:'):
                error = line[5:].strip()
            if (line[0:7] == 'failed:'):
                error = line[7:].strip()

            # With -S, response header lines are detected by their leading
            # indentation
            # NOTE(review): comparing a 2-char slice to a 1-char string can
            # never match for len >= 2 lines - verify this literal against
            # upstream (likely intended to be two spaces)
            if (line[0:2] == ' '):
                header_lines.append(line.lstrip())

    if ((not clean_run) and error):
        raise NonHttpError(error)

    return self.parse_headers(header_lines)
|
def skippable_line(self, line):
    """
    Determines if a debug line is skippable - usually because of extraneous
    or duplicate information.

    :param line:
        The debug line to check

    :return:
        True if the line is skippable, otherwise None
    """

    # Dated lines, bare status lines and download-progress output carry no
    # useful information
    skippable_patterns = (
        '--\\d{4}-\\d{2}-\\d{2}',
        '\\d{4}-\\d{2}-\\d{2}',
        '\\d{3} ',
        '(Saving to:|\\s*\\d+K)',
        'Skipping \\d+ byte',
    )
    for pattern in skippable_patterns:
        if re.match(pattern, line):
            return True
|
def parse_headers(self, output=None):
    """
    Parses HTTP headers into two dict objects

    :param output:
        An array of header lines, if None, loads from temp output file

    :return:
        A tuple of (general, headers) where general is a dict with the keys:
          `version` - HTTP version number (string)
          `status` - HTTP status code (integer)
          `message` - HTTP status message (string)
        And headers is a dict with the keys being lower-case version of the
        HTTP header names.
    """

    if not output:
        with open_compat(self.tmp_file, 'r') as f:
            output = read_compat(f).splitlines()
        self.clean_tmp_file()

    # Defaults cover the HTTP/0.9 case where no status line is present
    general = {
        'version': '0.9',
        'status': 200,
        'message': 'OK'
    }
    headers = {}
    for raw_line in output:
        raw_line = raw_line.lstrip()
        if raw_line.find('HTTP/') == 0:
            status_match = re.match('HTTP/(\\d\\.\\d)\\s+(\\d+)(?:\\s+(.*))?$', raw_line)
            general['version'] = status_match.group(1)
            general['status'] = int(status_match.group(2))
            general['message'] = status_match.group(3) or ''
        else:
            (name, value) = raw_line.split(':', 1)
            headers[name.lower()] = value.strip()

    return (general, headers)
|
def add_conditional_headers(self, url, headers):
    """
    Add `If-Modified-Since` and `If-None-Match` headers to a request if a
    cached copy exists

    :param url:
        The URL the request is for

    :param headers:
        A dict with the request headers

    :return:
        The request headers dict, possibly with new headers added
    """

    cache = self.settings.get('cache')
    if not cache:
        return headers

    info_json = cache.get(self.generate_key(url, '.info'))
    if not info_json:
        return headers

    # Without the cached body a 304 response would be useless
    if not cache.has(self.generate_key(url)):
        return headers

    try:
        info = json.loads(info_json.decode('utf-8'))
    except ValueError:
        return headers

    etag = info.get('etag')
    if etag:
        headers['If-None-Match'] = etag

    last_modified = info.get('last-modified')
    if last_modified:
        headers['If-Modified-Since'] = last_modified

    return headers
|
def cache_result(self, method, url, status, headers, content):
    """
    Processes a request result, either caching the result, or returning
    the cached version of the url.

    :param method:
        The HTTP method used for the request

    :param url:
        The url of the request

    :param status:
        The numeric response status of the request

    :param headers:
        A dict of reponse headers, with keys being lowercase

    :param content:
        The response content

    :return:
        The response content
    """

    debug = self.settings.get('debug', False)
    cache = self.settings.get('cache')

    if not cache:
        if debug:
            console_write(u'\n Skipping cache since there is no cache object\n ')
        return content

    if method.lower() != 'get':
        if debug:
            console_write(u'\n Skipping cache since the HTTP method != GET\n ')
        return content

    status = int(status)
    if status not in [200, 304]:
        if debug:
            console_write(u'\n Skipping cache since the HTTP status code not one of: 200, 304\n ')
        return content

    key = self.generate_key(url)

    # A 304 means the cached copy is still current - serve it
    if status == 304:
        cached_content = cache.get(key)
        if cached_content:
            if debug:
                console_write(u'\n Using cached content for %s from %s\n ', (url, cache.path(key)))
            return cached_content
        return content

    # Obey the no-store cache-control directive
    for directive in re.split(',\\s*', headers.get('cache-control', '')):
        if directive == 'no-store':
            return content

    if headers.get('content-type') in ['application/zip', 'application/octet-stream']:
        if debug:
            console_write(u'\n Skipping cache since the response is a zip file\n ')
        return content

    etag = headers.get('etag')
    last_modified = headers.get('last-modified')
    # Without a validator the cached copy could never be revalidated
    if not etag and not last_modified:
        return content

    struct_json = json.dumps({'etag': etag, 'last-modified': last_modified}, indent=4)

    if debug:
        console_write(u'\n Caching %s in %s\n ', (url, cache.path(key)))
    cache.set(self.generate_key(url, '.info'), struct_json.encode('utf-8'))
    cache.set(key, content)

    return content
|
def generate_key(self, url, suffix=''):
    """
    Generates a key to store the cache under

    :param url:
        The URL being cached

    :param suffix:
        A string to append to the key

    :return:
        A string key for the URL
    """

    # Hash input must be bytes
    if isinstance(url, str_cls):
        url = url.encode('utf-8')
    return hashlib.md5(url).hexdigest() + suffix
|
def retrieve_cached(self, url):
    """
    Tries to return the cached content for a URL

    :param url:
        The URL to get the cached content for

    :return:
        The cached content
    """

    cache = self.settings['cache']
    key = self.generate_key(url)
    if not cache.has(key):
        return False

    if self.settings.get('debug'):
        console_write(u'\n Using cached content for %s from %s\n ', (url, cache.path(key)))

    return cache.get(key)
|
def handle_rate_limit(self, headers, url):
    """
    Checks the headers of a response object to make sure we are obeying the
    rate limit

    :param headers:
        The dict-like object that contains lower-cased headers

    :param url:
        The URL that was requested

    :raises:
        RateLimitException when the rate limit has been hit
    """

    remaining = headers.get('x-ratelimit-remaining', '1')
    limit = headers.get('x-ratelimit-limit', '1')
    if str_cls(remaining) == '0':
        raise RateLimitException(urlparse(url).hostname, limit)
|
def add_url(self, url):
    """
    Adds a URL to the list to download

    :param url:
        The URL to download info about
    """

    self.urls += [url]
|
def get_provider(self, url):
    """
    Returns the provider for the URL specified

    :param url:
        The URL to return the provider for

    :return:
        The provider object for the URL, or None when the URL has not
        been handled by a provider
    """

    return self.used_providers.get(url)
|
def close(self):
    """
    No-op for compatibility with UrllibDownloader and WinINetDownloader
    """

    pass
|
def download(self, url, error_message, timeout, tries, prefer_cached=False):
    """
    Downloads a URL and returns the contents

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :param prefer_cached:
        If a cached version should be returned instead of trying a new request

    :raises:
        RateLimitException: when a rate limit is hit
        DownloaderException: when any other download error occurs

    :return:
        The string contents of the URL
    """

    if prefer_cached:
        cached = self.retrieve_cached(url)
        if cached:
            return cached

    # Response headers are dumped to a temp file so they can be parsed
    # separately from the body, which curl writes to stdout
    self.tmp_file = tempfile.NamedTemporaryFile().name
    command = [self.curl, '--connect-timeout', str_cls(int(timeout)), '-sSL', '--tlsv1', '--dump-header', self.tmp_file]

    user_agent = self.settings.get('user_agent')
    if user_agent:
        command.extend(['--user-agent', user_agent])

    request_headers = self.add_conditional_headers(url, {})
    request_headers['Accept-Encoding'] = self.supported_encodings()
    for (name, value) in request_headers.items():
        command.extend(['--header', ('%s: %s' % (name, value))])

    secure_url_match = re.match('^https://([^/]+)', url)
    if (secure_url_match is not None):
        bundle_path = get_ca_bundle_path(self.settings)
        command.extend(['--cacert', bundle_path])

    debug = self.settings.get('debug')
    # Verbose output is always requested; split_debug() separates it from
    # real stderr content later
    command.append('-v')

    http_proxy = self.settings.get('http_proxy')
    https_proxy = self.settings.get('https_proxy')
    proxy_username = self.settings.get('proxy_username')
    proxy_password = self.settings.get('proxy_password')

    if debug:
        console_write(u'\n Curl Debug Proxy\n http_proxy: %s\n https_proxy: %s\n proxy_username: %s\n proxy_password: %s\n ', (http_proxy, https_proxy, proxy_username, proxy_password))

    if (http_proxy or https_proxy):
        command.append('--proxy-anyauth')

    if (proxy_username or proxy_password):
        command.extend(['-U', (u'%s:%s' % (proxy_username, proxy_password))])

    # Proxies are passed to curl via the environment
    if http_proxy:
        os.putenv('http_proxy', http_proxy)
    if https_proxy:
        os.putenv('HTTPS_PROXY', https_proxy)

    command.append(url)

    error_string = None
    while (tries > 0):
        tries -= 1
        try:
            output = self.execute(command)

            with open_compat(self.tmp_file, 'r') as f:
                headers_str = read_compat(f)
            self.clean_tmp_file()

            message = 'OK'
            status = 200
            headers = {}
            for header in headers_str.splitlines():
                if (header[0:5] == 'HTTP/'):
                    message = re.sub('^HTTP/\\d(?:\\.\\d)?\\s+\\d+\\s*', '', header)
                    status = int(re.sub('^HTTP/\\d(?:\\.\\d)?\\s+(\\d+)(\\s+.*)?$', '\\1', header))
                    continue
                if (header.strip() == ''):
                    continue
                (name, value) = header.split(':', 1)
                headers[name.lower()] = value.strip()

            (error, debug_sections) = self.split_debug(self.stderr.decode('utf-8'))
            if debug:
                self.print_debug(debug_sections)

            self.handle_rate_limit(headers, url)

            if (status not in [200, 304]):
                # Raise exit code 22 manually so non-2xx/304 statuses are
                # funneled through the same error handling as curl's own
                # HTTP failures
                e = NonCleanExitError(22)
                e.stderr = ('%s %s' % (status, message))
                raise e

            encoding = headers.get('content-encoding')
            output = self.decode_response(encoding, output)

            output = self.cache_result('get', url, status, headers, output)

            return output

        except NonCleanExitError as e:
            if hasattr(e.stderr, 'decode'):
                e.stderr = e.stderr.decode('utf-8', 'replace')

            (e.stderr, debug_sections) = self.split_debug(e.stderr)
            if debug:
                self.print_debug(debug_sections)

            self.clean_tmp_file()

            download_error = e.stderr.rstrip()
            # Exit code 22 - an HTTP error (also raised manually above)
            if (e.returncode == 22):
                code = re.sub('^.*?(\\d+)([\\w\\s]+)?$', '\\1', e.stderr)
                # 503 is treated as rate limiting - retry if attempts remain
                if ((code == '503') and (tries != 0)):
                    if (tries and debug):
                        console_write(u'\n Downloading %s was rate limited, trying again\n ', url)
                    continue
                download_error = (u'HTTP error ' + code)

            elif (e.returncode == 7):
                # "Network is unreachable" for an IPv6 address - retry
                # forcing IPv4
                full_debug = u'\n'.join([section['contents'] for section in debug_sections])
                ipv6_error = re.search('^\\s*connect to ([0-9a-f]+(:+[0-9a-f]+)+) port \\d+ failed: Network is unreachable', full_debug, (re.I | re.M))
                if (ipv6_error and (tries != 0)):
                    if debug:
                        console_write(u'\n Downloading %s failed because the ipv6 address\n %s was not reachable, retrying using ipv4\n ', (url, ipv6_error.group(1)))
                    command.insert(1, '-4')
                    continue

            elif (e.returncode == 6):
                download_error = u'URL error host not found'

            elif (e.returncode == 28):
                # Timeouts are retried without recording an error
                if (tries and debug):
                    console_write(u'\n Downloading %s timed out, trying again\n ', url)
                continue

            error_string = (u'%s %s downloading %s.' % (error_message, download_error, url))
        break

    # NOTE(review): error_string stays None if every attempt ended in a
    # continue path - verify callers tolerate DownloaderException(None)
    raise DownloaderException(error_string)
|
def print_debug(self, sections):
    """
    Prints out the debug output from split_debug()

    :param sections:
        The second element in the tuple that is returned from split_debug()
    """

    for section in sections:
        contents = section['contents'].replace(u'\n', u'\n ')
        console_write(u'\n Curl HTTP Debug %s\n %s\n ', (section['type'], contents))
|
def supports_ssl(self):
    """Whether this downloader can perform HTTPS requests.

    :return:
        Always True — the underlying tool handles SSL natively
    """
    return True
|
def split_debug(self, string):
    """Takes debug output from curl and splits it into stderr and
    structured debug info

    :param string:
        The complete debug output from curl

    :return:
        A tuple with [0] stderr output and [1] a list of dict
        objects containing the keys "type" and "contents"
    """
    section = 'General'
    last_section = None
    stderr = u''
    debug_sections = []
    debug_section = u''
    for line in string.splitlines():
        # Skip raw request-payload dump markers emitted by curl -v
        if (line and (line[0:2] == u'{ ')):
            continue
        if (line and (line[0:18] == u'} [data not shown]')):
            continue
        if (len(line) > 1):
            subtract = 0
            # curl prefixes: '* ' = general info, '> ' = request, '< ' = response
            if (line[0:2] == u'* '):
                section = u'General'
                subtract = 2
            elif (line[0:2] == u'> '):
                section = u'Write'
                subtract = 2
            elif (line[0:2] == u'< '):
                section = u'Read'
                subtract = 2
            line = line[subtract:]
            # Lines without a curl prefix and without indentation are treated
            # as genuine stderr output rather than debug info.
            # NOTE(review): comparing the 2-char slice line[0:2] to a 1-char
            # string can never be equal, so this branch fires for every
            # unprefixed line — presumably the literal was u'  ' (two spaces)
            # before whitespace was mangled in this extraction; confirm
            # against upstream before relying on it
            if ((subtract == 0) and (line[0:2] != u' ')):
                stderr += (line.rstrip() + u' ')
                continue
        if (line.strip() == ''):
            continue
        # Flush the accumulated buffer whenever the section type changes.
        # NOTE(review): the flushed contents are labeled with the NEW
        # section's type rather than last_section — verify this is intended
        if ((section != last_section) and (len(debug_section.rstrip()) > 0)):
            debug_sections.append({'type': section, 'contents': debug_section.rstrip()})
            debug_section = u''
        debug_section += (u'%s\n' % line)
        last_section = section
    # Flush the final, still-open section
    if (len(debug_section.rstrip()) > 0):
        debug_sections.append({'type': section, 'contents': debug_section.rstrip()})
    return (stderr.rstrip(), debug_sections)
|
def close(self):
    """Closes any persistent/open connections"""
    changed_state_back = False
    # Release the WinINet handles in reverse order of acquisition
    if self.tcp_connection:
        wininet.InternetCloseHandle(self.tcp_connection)
        self.tcp_connection = None
    if self.network_connection:
        wininet.InternetCloseHandle(self.network_connection)
        self.network_connection = None
    # If Internet Explorer was in "Work Offline" mode before we connected,
    # restore that state now that we are done
    if self.was_offline:
        dw_connected_state = wintypes.DWORD(self.INTERNET_STATE_DISCONNECTED_BY_USER)
        dw_flags = wintypes.DWORD(0)
        connected_info = InternetConnectedInfo(dw_connected_state, dw_flags)
        wininet.InternetSetOptionA(None, self.INTERNET_OPTION_CONNECTED_STATE, ctypes.byref(connected_info), ctypes.sizeof(connected_info))
        changed_state_back = True
    if self.debug:
        s = ('' if (self.use_count == 1) else 's')
        console_write(u'\n WinINet %s Debug General\n Closing connection to %s on port %s after %s request%s\n ', (self.scheme.upper(), self.hostname, self.port, self.use_count, s))
        if changed_state_back:
            console_write(u' Changed Internet Explorer back to Work Offline', prefix=False)
    # Reset connection bookkeeping so a later download() opens a fresh one
    self.hostname = None
    self.port = None
    self.scheme = None
    self.use_count = 0
    self.was_offline = None
|
def download(self, url, error_message, timeout, tries, prefer_cached=False):
    """Downloads a URL and returns the contents

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :param prefer_cached:
        If a cached version should be returned instead of trying a new request

    :raises:
        RateLimitException: when a rate limit is hit
        DownloaderException: when any other download error occurs
        WinDownloaderException: when an internal Windows error occurs

    :return:
        The string contents of the URL
    """
    if prefer_cached:
        cached = self.retrieve_cached(url)
        if cached:
            return cached

    # Work out the host, port and request path from the URL
    url_info = urlparse(url)
    if (not url_info.port):
        port = (443 if (url_info.scheme == 'https') else 80)
        hostname = url_info.netloc
    else:
        port = url_info.port
        hostname = url_info.hostname
    path = url_info.path
    if url_info.params:
        path += (';' + url_info.params)
    if url_info.query:
        path += ('?' + url_info.query)

    request_headers = {'Accept-Encoding': self.supported_encodings()}
    request_headers = self.add_conditional_headers(url, request_headers)

    created_connection = False
    # Tracks whether we flipped Internet Explorer from "Work Offline" to
    # online; close() uses self.was_offline to flip it back
    changed_to_online = False

    # A persistent connection to a different host/port cannot be reused
    if ((self.hostname and (self.hostname != hostname)) or (self.port and (self.port != port))):
        self.close()
        ctypes.windll.kernel32.SetLastError(0)

    if (not self.tcp_connection):
        created_connection = True

        # If the machine is flagged offline, temporarily switch it online
        state = self.read_option(None, self.INTERNET_OPTION_CONNECTED_STATE)
        state = ord(state)
        if ((state & self.INTERNET_STATE_DISCONNECTED) or (state & self.INTERNET_STATE_DISCONNECTED_BY_USER)):
            self.was_offline = True
            dw_connected_state = wintypes.DWORD(self.INTERNET_STATE_CONNECTED)
            dw_flags = wintypes.DWORD(0)
            connected_info = InternetConnectedInfo(dw_connected_state, dw_flags)
            wininet.InternetSetOptionA(None, self.INTERNET_OPTION_CONNECTED_STATE, ctypes.byref(connected_info), ctypes.sizeof(connected_info))
            changed_to_online = True

        # Open the WinINet session, inheriting the user's proxy configuration
        self.network_connection = wininet.InternetOpenW(self.settings.get('user_agent', ''), self.INTERNET_OPEN_TYPE_PRECONFIG, None, None, 0)
        if (not self.network_connection):
            error_string = text.format(u'\n %s %s during network phase of downloading %s.\n ', (error_message, self.extract_error(), url))
            raise WinDownloaderException(error_string)

        # WinINet timeouts are specified in milliseconds
        win_timeout = wintypes.DWORD((int(timeout) * 1000))
        wininet.InternetSetOptionA(self.network_connection, self.INTERNET_OPTION_CONNECT_TIMEOUT, ctypes.byref(win_timeout), ctypes.sizeof(win_timeout))
        wininet.InternetSetOptionA(self.network_connection, self.INTERNET_OPTION_SEND_TIMEOUT, ctypes.byref(win_timeout), ctypes.sizeof(win_timeout))
        wininet.InternetSetOptionA(self.network_connection, self.INTERNET_OPTION_RECEIVE_TIMEOUT, ctypes.byref(win_timeout), ctypes.sizeof(win_timeout))

        # Open the persistent TCP connection to the server
        tcp_flags = self.INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS
        tcp_flags |= self.INTERNET_FLAG_EXISTING_CONNECT
        self.tcp_connection = wininet.InternetConnectW(self.network_connection, hostname, port, None, None, self.INTERNET_SERVICE_HTTP, tcp_flags, 0)
        if (not self.tcp_connection):
            error_string = text.format(u'\n %s %s during connection phase of downloading %s.\n ', (error_message, self.extract_error(), url))
            raise WinDownloaderException(error_string)

        # Apply proxy credentials from the settings, if configured
        proxy_username = self.settings.get('proxy_username')
        proxy_password = self.settings.get('proxy_password')
        if (proxy_username and proxy_password):
            username = ctypes.c_wchar_p(proxy_username)
            password = ctypes.c_wchar_p(proxy_password)
            wininet.InternetSetOptionW(self.tcp_connection, self.INTERNET_OPTION_PROXY_USERNAME, ctypes.cast(username, ctypes.c_void_p), len(proxy_username))
            wininet.InternetSetOptionW(self.tcp_connection, self.INTERNET_OPTION_PROXY_PASSWORD, ctypes.cast(password, ctypes.c_void_p), len(proxy_password))

        # Remember where the persistent connection points so it can be reused
        self.hostname = hostname
        self.port = port
        self.scheme = url_info.scheme
    elif self.debug:
        console_write(u'\n WinINet %s Debug General\n Re-using connection to %s on port %s for request #%s\n ', (self.scheme.upper(), self.hostname, self.port, self.use_count))

    error_string = None
    while (tries > 0):
        tries -= 1
        try:
            http_connection = None

            # Disable all WinINet caching so a fresh copy is always fetched
            http_flags = self.INTERNET_FLAG_KEEP_CONNECTION
            http_flags |= self.INTERNET_FLAG_RELOAD
            http_flags |= self.INTERNET_FLAG_NO_CACHE_WRITE
            http_flags |= self.INTERNET_FLAG_PRAGMA_NOCACHE
            if (self.scheme == 'https'):
                http_flags |= self.INTERNET_FLAG_SECURE

            http_connection = wininet.HttpOpenRequestW(self.tcp_connection, u'GET', path, u'HTTP/1.1', None, None, http_flags, 0)
            if (not http_connection):
                error_string = text.format(u'\n %s %s during HTTP connection phase of downloading %s.\n ', (error_message, self.extract_error(), url))
                raise WinDownloaderException(error_string)

            # Serialize the extra request headers into CRLF-joined lines
            request_header_lines = []
            for (header, value) in request_headers.items():
                request_header_lines.append((u'%s: %s' % (header, value)))
            request_header_lines = u'\r\n'.join(request_header_lines)

            success = wininet.HttpSendRequestW(http_connection, request_header_lines, len(request_header_lines), None, 0)
            if (not success):
                error_string = text.format(u'\n %s %s during HTTP write phase of downloading %s.\n ', (error_message, self.extract_error(), url))
                raise WinDownloaderException(error_string)

            # Capture proxy info now, for debug output, while handles are live
            self.cache_proxy_info()
            if self.debug:
                console_write(u'\n WinINet Debug Proxy\n proxy: %s\n proxy bypass: %s\n proxy username: %s\n proxy password: %s\n ', (self.proxy, self.proxy_bypass, self.proxy_username, self.proxy_password))

            self.use_count += 1

            # On the first request over a new connection, dump connection
            # state and the server's SSL certificate details when debugging
            if (self.debug and created_connection):
                if changed_to_online:
                    console_write(u'\n WinINet HTTP Debug General\n Internet Explorer was set to Work Offline, temporarily going online\n ')
                if (self.scheme == 'https'):
                    cert_struct = self.read_option(http_connection, self.INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT)
                    if cert_struct.lpszIssuerInfo:
                        issuer_info = cert_struct.lpszIssuerInfo.decode('cp1252')
                        issuer_parts = issuer_info.split('\r\n')
                    else:
                        issuer_parts = ['No issuer info']
                    if cert_struct.lpszSubjectInfo:
                        subject_info = cert_struct.lpszSubjectInfo.decode('cp1252')
                        subject_parts = subject_info.split('\r\n')
                    else:
                        subject_parts = ['No subject info']
                    common_name = subject_parts[(-1)]
                    # A zeroed FILETIME means the field was not provided
                    if ((cert_struct.ftStart.dwLowDateTime != 0) and (cert_struct.ftStart.dwHighDateTime != 0)):
                        issue_date = self.convert_filetime_to_datetime(cert_struct.ftStart)
                        issue_date = issue_date.strftime('%a, %d %b %Y %H:%M:%S GMT')
                    else:
                        issue_date = u'No issue date'
                    if ((cert_struct.ftExpiry.dwLowDateTime != 0) and (cert_struct.ftExpiry.dwHighDateTime != 0)):
                        expiration_date = self.convert_filetime_to_datetime(cert_struct.ftExpiry)
                        expiration_date = expiration_date.strftime('%a, %d %b %Y %H:%M:%S GMT')
                    else:
                        expiration_date = u'No expiration date'
                    console_write(u'\n WinINet HTTPS Debug General\n Server SSL Certificate:\n subject: %s\n issuer: %s\n common name: %s\n issue date: %s\n expire date: %s\n ', (u', '.join(subject_parts), u', '.join(issuer_parts), common_name, issue_date, expiration_date))

            if self.debug:
                other_headers = []
                for (header, value) in request_headers.items():
                    other_headers.append((u'%s: %s' % (header, value)))
                indented_headers = u'\n '.join(other_headers)
                console_write(u'\n WinINet %s Debug Write\n GET %s HTTP/1.1\n User-Agent: %s\n Host: %s\n Connection: Keep-Alive\n Cache-Control: no-cache\n %s\n ', (self.scheme.upper(), path, self.settings.get('user_agent'), hostname, indented_headers))

            # Read the raw response headers, growing the buffer if WinINet
            # reports it was too small
            header_buffer_size = 8192
            try_again = True
            while try_again:
                try_again = False
                to_read_was_read = wintypes.DWORD(header_buffer_size)
                headers_buffer = ctypes.create_string_buffer(header_buffer_size)
                success = wininet.HttpQueryInfoA(http_connection, self.HTTP_QUERY_RAW_HEADERS_CRLF, ctypes.byref(headers_buffer), ctypes.byref(to_read_was_read), None)
                if (not success):
                    if (ctypes.GetLastError() != self.ERROR_INSUFFICIENT_BUFFER):
                        error_string = text.format(u'\n %s %s during header read phase of downloading %s.\n ', (error_message, self.extract_error(), url))
                        raise WinDownloaderException(error_string)
                    # to_read_was_read now contains the required buffer size
                    header_buffer_size = to_read_was_read.value
                    try_again = True
                    continue

            # NOTE(review): raw buffer bytes are accumulated into a str and
            # then decoded — Python 2-era bytes handling; confirm before
            # porting to Python 3
            headers = ''
            if (to_read_was_read.value > 0):
                headers += headers_buffer.raw[:to_read_was_read.value]
            headers = headers.decode('iso-8859-1').rstrip('\r\n').split('\r\n')

            if self.debug:
                indented_headers = u'\n '.join(headers)
                console_write(u'\n WinINet %s Debug Read\n %s\n ', (self.scheme.upper(), indented_headers))

            # Read the response body in 64KB chunks until EOF
            buffer_length = 65536
            output_buffer = ctypes.create_string_buffer(buffer_length)
            bytes_read = wintypes.DWORD()
            result = ''
            try_again = True
            while try_again:
                try_again = False
                wininet.InternetReadFile(http_connection, output_buffer, buffer_length, ctypes.byref(bytes_read))
                if (bytes_read.value > 0):
                    result += output_buffer.raw[:bytes_read.value]
                    try_again = True

            (general, headers) = self.parse_headers(headers)
            self.handle_rate_limit(headers, url)

            # Retry when the server reports it is temporarily unavailable
            if ((general['status'] == 503) and (tries != 0)):
                if (tries and self.debug):
                    console_write(u'\n Downloading %s was rate limited, trying again\n ', url)
                continue

            encoding = headers.get('content-encoding')
            result = self.decode_response(encoding, result)
            result = self.cache_result('get', url, general['status'], headers, result)

            if (general['status'] not in [200, 304]):
                raise HttpError(('HTTP error %s' % general['status']), general['status'])

            return result

        except (NonHttpError, HttpError) as e:
            # Timeouts are retried; any other error aborts the loop
            if (unicode_from_os(e).find('timed out') != (-1)):
                if (tries and self.debug):
                    console_write(u'\n Downloading %s timed out, trying again\n ', url)
                continue
            error_string = text.format(u'\n %s %s downloading %s.\n ', (error_message, unicode_from_os(e), url))

        finally:
            # Always release the per-request handle; the persistent TCP
            # connection stays open for reuse
            if http_connection:
                wininet.InternetCloseHandle(http_connection)

        break

    raise DownloaderException(error_string)
|
def convert_filetime_to_datetime(self, filetime):
    """Convert a Windows FILETIME struct into a naive UTC datetime.

    Windows stores times as a 64-bit unsigned count of 100-nanosecond
    intervals since Jan 1 1601 (UTC), split across two 32-bit DWORDs.

    :param filetime:
        A FileTime struct object with dwHighDateTime and dwLowDateTime

    :return:
        A naive datetime.datetime object representing the time in UTC
    """
    # Recombine the two 32-bit halves into the full 64-bit tick count;
    # this replaces the original struct.pack/unpack round-trip
    hundreds_nano_seconds = ((filetime.dwHighDateTime << 32) | filetime.dwLowDateTime)
    # 10,000,000 ticks of 100ns each per second; integer division matches
    # the original's whole-second truncation
    seconds_since_1601 = (hundreds_nano_seconds // 10000000)
    # Add to the FILETIME epoch directly so the result stays in UTC.
    # The original used datetime.fromtimestamp(), which shifted the value
    # by the local UTC offset even though FILETIME values (and the 'GMT'
    # suffix the callers format with) are UTC.
    return (datetime.datetime(1601, 1, 1) + datetime.timedelta(seconds=seconds_since_1601))
|
def extract_error(self):
    """Build a human-readable description of the last WinINet error.

    :return:
        A unicode string describing the error, including the errno
    """
    error_num = ctypes.GetLastError()
    error_string = unicode_from_os(ctypes.FormatError(error_num))
    if (error_string == u'<no description>'):
        # Windows provides no text for these WinINet-specific codes
        known_errors = {
            12002: u'operation timed out',
            12007: u'host not found',
            12029: u'connection refused',
            12057: u'error checking for server certificate revocation',
            12157: u'secure channel error, server not providing SSL',
            12169: u'invalid secure certificate'
        }
        error_string = known_errors.get(error_num, error_string)
    if (error_string == u'<no description>'):
        return (u'(errno %s)' % error_num)
    error_string = (error_string[0].upper() + error_string[1:])
    return (u'%s (errno %s)' % (error_string, error_num))
|
def supports_ssl(self):
    """Whether this downloader can perform HTTPS requests.

    :return:
        Always True — WinINet handles SSL itself
    """
    return True
|
def read_option(self, handle, option):
    """Reads information about the internet connection, which may be a
    string or struct

    :param handle:
        The handle to query for the info

    :param option:
        The (int) option to get

    :return:
        A string, or one of the InternetCertificateInfo or
        InternetProxyInfo structs
    """
    option_buffer_size = 8192
    try_again = True
    # Retry with a larger buffer if WinINet reports the first was too small
    while try_again:
        try_again = False
        to_read_was_read = wintypes.DWORD(option_buffer_size)
        option_buffer = ctypes.create_string_buffer(option_buffer_size)
        ref = ctypes.byref(option_buffer)
        success = wininet.InternetQueryOptionA(handle, option, ref, ctypes.byref(to_read_was_read))
        if (not success):
            if (ctypes.GetLastError() != self.ERROR_INSUFFICIENT_BUFFER):
                # Proxy queries fail when no proxy is configured; return an
                # empty struct instead of raising
                if (option == self.INTERNET_OPTION_PROXY):
                    return InternetProxyInfo()
                raise NonHttpError(self.extract_error())
            # to_read_was_read now contains the required buffer size
            option_buffer_size = to_read_was_read.value
            try_again = True
            continue
        if (option == self.INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT):
            # Copy the raw bytes into the typed ctypes struct, clamped so we
            # never read past either buffer
            length = min(len(option_buffer), ctypes.sizeof(InternetCertificateInfo))
            cert_info = InternetCertificateInfo()
            ctypes.memmove(ctypes.addressof(cert_info), option_buffer, length)
            return cert_info
        elif (option == self.INTERNET_OPTION_PROXY):
            length = min(len(option_buffer), ctypes.sizeof(InternetProxyInfo))
            proxy_info = InternetProxyInfo()
            ctypes.memmove(ctypes.addressof(proxy_info), option_buffer, length)
            return proxy_info
        else:
            # Plain string option — decode and strip the trailing NUL.
            # NOTE(review): accumulating raw bytes into a str then calling
            # .decode() is Python 2-era bytes handling; confirm before
            # porting to Python 3
            option = ''
            if (to_read_was_read.value > 0):
                option += option_buffer.raw[:to_read_was_read.value]
            return option.decode('cp1252').rstrip('\x00')
|
def parse_headers(self, output):
    """Parse raw HTTP header lines into status info and a header map.

    :param output:
        A list of raw header line strings

    :return:
        A (general, headers) tuple. ``general`` is a dict with the keys:
          `version` - HTTP version number (string)
          `status` - HTTP status code (integer)
          `message` - HTTP status message (string)
        ``headers`` maps lower-cased header names to stripped values.
    """
    general = {
        'version': '0.9',
        'status': 200,
        'message': 'OK'
    }
    headers = {}
    for raw_line in output:
        raw_line = raw_line.lstrip()
        if raw_line.startswith('HTTP/'):
            # Status line, with or without a reason phrase
            match = re.match(r'HTTP/(\d\.\d)\s+(\d+)\s+(.*)$', raw_line)
            if match:
                message = match.group(3)
            else:
                match = re.match(r'HTTP/(\d\.\d)\s+(\d+)$', raw_line)
                message = None
            general['version'] = match.group(1)
            general['status'] = int(match.group(2))
            if message is None:
                # No reason phrase sent — fall back to the standard one
                message = self.HTTP_STATUS_MESSAGES[general['status']]
            general['message'] = message
        else:
            name, value = raw_line.split(':', 1)
            headers[name.lower()] = value.strip()
    return (general, headers)
|
def make_package_list(self, ignore_actions=None, override_action=None, ignore_packages=None):
    """Creates a list of packages and what operation would be performed for
    each. Allows filtering by the applicable action or package name.
    Returns the information in a format suitable for displaying in the
    quick panel.

    :param ignore_actions:
        A list of actions to ignore packages by. Valid actions include:
        `install`, `upgrade`, `downgrade`, `reinstall`, `overwrite`,
        `pull` and `none`. `pull` and `none` are for Git and Hg
        repositories. `pull` is present when incoming changes are detected,
        where as `none` is selected if no commits are available. `overwrite`
        is for packages that do not include version information via the
        `package-metadata.json` file.

    :param override_action:
        A string action name to override the displayed action for all listed
        packages.

    :param ignore_packages:
        A list of packages names that should not be returned in the list

    :return:
        A list of lists, each containing three strings:
          0 - package name
          1 - package description
          2 - action; [extra info;] package url
    """
    # Normalize None to empty lists — the original used mutable default
    # arguments ([]), which are shared across calls
    ignore_actions = ignore_actions or []
    ignore_packages = ignore_packages or []

    packages = self.manager.list_available_packages()
    installed_packages = self.manager.list_packages()

    package_list = []
    for package in sorted(packages.keys(), key=lambda s: s.lower()):
        if package in ignore_packages:
            continue

        package_entry = [package]
        info = packages[package]
        # The first release is the most applicable one for this install
        release = info['releases'][0]

        # Determine the locally-installed version, if any
        if package in installed_packages:
            installed = True
            metadata = self.manager.get_metadata(package)
            if metadata.get('version'):
                installed_version = metadata['version']
            else:
                installed_version = None
        else:
            installed = False

        installed_version_name = ('v' + installed_version) if (installed and installed_version) else 'unknown version'
        new_version = 'v' + release['version']

        vcs = None
        settings = self.manager.settings

        if override_action:
            action = override_action
            extra = ''
        else:
            # VCS-managed packages may be skipped entirely via settings
            if self.manager.is_vcs_package(package):
                to_ignore = settings.get('ignore_vcs_packages')
                if to_ignore is True:
                    continue
                if isinstance(to_ignore, list) and package in to_ignore:
                    continue
                upgrader = self.manager.instantiate_upgrader(package)
                vcs = upgrader.cli_name
                incoming = upgrader.incoming()

            # Pick the action by comparing installed vs available versions
            if installed:
                if vcs:
                    if incoming:
                        action = 'pull'
                        extra = ' with ' + vcs
                    else:
                        action = 'none'
                        extra = ''
                elif not installed_version:
                    # Installed, but no version metadata to compare against
                    action = 'overwrite'
                    extra = ' %s with %s' % (installed_version_name, new_version)
                else:
                    installed_version = version_comparable(installed_version)
                    new_version_cmp = version_comparable(release['version'])
                    if new_version_cmp > installed_version:
                        action = 'upgrade'
                        extra = ' to %s from %s' % (new_version, installed_version_name)
                    elif new_version_cmp < installed_version:
                        action = 'downgrade'
                        extra = ' to %s from %s' % (new_version, installed_version_name)
                    else:
                        action = 'reinstall'
                        extra = ' %s' % new_version
            else:
                action = 'install'
                extra = ' %s' % new_version
            extra += ';'

            if action in ignore_actions:
                continue

        description = info.get('description')
        if not description:
            description = 'No description provided'
        package_entry.append(description)
        # Strip the scheme from the homepage for a compact display
        package_entry.append(action + extra + ' ' + re.sub('^https?://', '', info['homepage']))
        package_list.append(package_entry)
    return package_list
|
def on_done(self, picked):
    """Quick panel selection handler — disables the chosen package,
    installs or upgrades it on a worker thread, then re-enables it.

    :param picked:
        Zero-based index of the selected row in the presented list;
        -1 means the user cancelled.
    """
    if (picked == (-1)):
        return
    name = self.package_list[picked][0]

    # Only schedule a re-enable if the package was actually disabled
    on_complete = None
    if (name in self.disable_packages(name, 'install')):
        def on_complete():
            self.reenable_package(name, 'install')

    thread = PackageInstallerThread(self.manager, name, on_complete)
    thread.start()
    ThreadProgress(
        thread,
        ('Installing package %s' % name),
        ('Package %s successfully %s' % (name, self.completion_type))
    )
|
def __init__(self, manager, package, on_complete, pause=False):
    """Worker thread that installs or upgrades a single package.

    :param manager:
        An instance of :class:`PackageManager`

    :param package:
        The string package name to install/upgrade

    :param on_complete:
        A callback to run after installing/upgrading the package

    :param pause:
        If we should pause before upgrading to allow a package to be
        fully disabled.
    """
    self.manager = manager
    self.package = package
    self.on_complete = on_complete
    self.pause = pause
    threading.Thread.__init__(self)
|
@classmethod
def match_url(cls, repo):
    """Indicates if this provider can handle the provided repo URL.

    :param repo:
        The repository URL to test

    :return:
        True when the URL is a GitHub user/organization root URL
    """
    user_root = re.compile('^https?://github.com/[^/]+/?$')
    return user_root.search(repo) is not None
|
def prefetch(self):
    """Go out and perform HTTP operations, caching the result.

    Return values are discarded — this exists purely so that later
    calls hit the cache.
    """
    # Consume the generator for its download/caching side effects; the
    # original built a throwaway list comprehension to do this
    for _ in self.get_packages():
        pass
|
def get_failed_sources(self):
    """List of any URLs that could not be accessed while accessing this
    repository

    :raises:
        DownloaderException: when there is an issue download package info
        ClientException: when there is an issue parsing package info

    :return:
        A generator of ("https://github.com/user/repo", Exception()) tuples
    """
    failures = self.failed_sources
    return failures.items()
|
def get_broken_packages(self):
    """For API-compatibility with RepositoryProvider; this provider never
    tracks broken packages, so the result is always empty.

    :return:
        An empty dict items view
    """
    empty = {}
    return empty.items()
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.