Dataset columns (one record per function):

| Column | Type |
|---|---|
| repository_name | string (5–67 chars) |
| func_path_in_repository | string (4–234 chars) |
| func_name | string (0–314 chars) |
| whole_func_string | string (52–3.87M chars) |
| language | class label (6 values) |
| func_code_string | string (52–3.87M chars) |
| func_documentation_string | string (1–47.2k chars) |
| func_code_url | string (85–339 chars) |

Each record lists the repository, file path and function name, followed by the function source and a link to it on GitHub.
**eyurtsev/fcsparser | fcsparser/api.py | FCSParser.data** (python)

```python
def data(self):
    """Get parsed DATA segment of the FCS file."""
    if self._data is None:
        with open(self.path, 'rb') as f:
            self.read_data(f)
    return self._data
```

Source: https://github.com/eyurtsev/fcsparser/blob/710e8e31d4b09ff6e73d47d86770be6ca2f4282c/fcsparser/api.py#L436-L441
**eyurtsev/fcsparser | fcsparser/api.py | FCSParser.analysis** (python)

```python
def analysis(self):
    """Get ANALYSIS segment of the FCS file."""
    if self._analysis is None:
        with open(self.path, 'rb') as f:
            self.read_analysis(f)
    return self._analysis
```

Source: https://github.com/eyurtsev/fcsparser/blob/710e8e31d4b09ff6e73d47d86770be6ca2f4282c/fcsparser/api.py#L444-L449
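Both accessors above use the same lazy parse-and-cache pattern: the segment is read from disk on first access and the cached result is returned afterwards. A minimal standalone sketch of that pattern (the class and the read step below are illustrative, not part of fcsparser):

```python
class LazySegment:
    """Parse-on-first-access caching, as used by the FCSParser properties."""

    def __init__(self, path):
        self.path = path
        self._data = None  # Nothing parsed yet.

    @property
    def data(self):
        # Only touch the file the first time the attribute is accessed;
        # later accesses return the cached result.
        if self._data is None:
            with open(self.path, 'rb') as f:
                self._data = f.read()  # Stand-in for the real parsing step.
        return self._data
```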
**eyurtsev/fcsparser | fcsparser/api.py | FCSParser.reformat_meta** (python)

```python
def reformat_meta(self):
    """Collect the meta data information in a more user friendly format.

    Function looks through the meta data, collecting the channel related information into a
    dataframe and moving it into the _channels_ key.
    """
    meta = self.annotation  # For shorthand (passed by reference)
    channel_properties = []
    for key, value in meta.items():
        if key[:3] == '$P1':
            if key[3] not in string.digits:
                channel_properties.append(key[3:])
    # Capture all the channel information in a list of lists -- used to create a data frame
    channel_matrix = [
        [meta.get('$P{0}{1}'.format(ch, p)) for p in channel_properties]
        for ch in self.channel_numbers
    ]
    # Remove this information from the dictionary
    for ch in self.channel_numbers:
        for p in channel_properties:
            key = '$P{0}{1}'.format(ch, p)
            if key in meta:
                meta.pop(key)
    num_channels = meta['$PAR']
    column_names = ['$Pn{0}'.format(p) for p in channel_properties]
    df = pd.DataFrame(channel_matrix, columns=column_names,
                      index=(1 + numpy.arange(num_channels)))
    if '$PnE' in column_names:
        df['$PnE'] = df['$PnE'].apply(lambda x: x.split(','))
    df.index.name = 'Channel Number'
    meta['_channels_'] = df
    meta['_channel_names_'] = self.get_channel_names()
```

Source: https://github.com/eyurtsev/fcsparser/blob/710e8e31d4b09ff6e73d47d86770be6ca2f4282c/fcsparser/api.py#L451-L489
**eyurtsev/fcsparser | fcsparser/api.py | FCSParser.dataframe** (python)

```python
def dataframe(self):
    """Construct Pandas dataframe."""
    data = self.data
    channel_names = self.get_channel_names()
    return pd.DataFrame(data, columns=channel_names)
```

Source: https://github.com/eyurtsev/fcsparser/blob/710e8e31d4b09ff6e73d47d86770be6ca2f4282c/fcsparser/api.py#L492-L496
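For context, these properties are typically reached through fcsparser's module-level `parse()` helper; a short usage sketch, assuming a local `example.fcs` file exists (the filename is a placeholder):

```python
import fcsparser

# `reformat_meta=True` invokes reformat_meta() so the metadata dictionary
# gains the '_channels_' dataframe and '_channel_names_' entries.
meta, data = fcsparser.parse('example.fcs', reformat_meta=True)

print(meta['_channels_'])  # Per-channel metadata as a dataframe.
print(data.head())         # Event data as a pandas DataFrame.
```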
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.get_binary_dist** (python)

```python
def get_binary_dist(self, requirement):
    """
    Get or create a cached binary distribution archive.

    :param requirement: A :class:`.Requirement` object.
    :returns: An iterable of tuples with two values each: A
              :class:`tarfile.TarInfo` object and a file-like object.

    Gets the cached binary distribution that was previously built for the
    given requirement. If no binary distribution has been cached yet, a new
    binary distribution is built and added to the cache.

    Uses :func:`build_binary_dist()` to build binary distribution
    archives. If this fails with a build error :func:`get_binary_dist()`
    will use :class:`.SystemPackageManager` to check for and install
    missing system packages and retry the build when missing system
    packages were installed.
    """
    cache_file = self.cache.get(requirement)
    if cache_file:
        if self.needs_invalidation(requirement, cache_file):
            logger.info("Invalidating old %s binary (source has changed) ..", requirement)
            cache_file = None
    else:
        logger.debug("%s hasn't been cached yet, doing so now.", requirement)
    if not cache_file:
        # Build the binary distribution.
        try:
            raw_file = self.build_binary_dist(requirement)
        except BuildFailed:
            logger.warning("Build of %s failed, checking for missing dependencies ..", requirement)
            if self.system_package_manager.install_dependencies(requirement):
                raw_file = self.build_binary_dist(requirement)
            else:
                raise
        # Transform the binary distribution archive into a form that we can re-use.
        fd, transformed_file = tempfile.mkstemp(prefix='pip-accel-bdist-', suffix='.tar.gz')
        try:
            archive = tarfile.open(transformed_file, 'w:gz')
            try:
                for member, from_handle in self.transform_binary_dist(raw_file):
                    archive.addfile(member, from_handle)
            finally:
                archive.close()
            # Push the binary distribution archive to all available backends.
            with open(transformed_file, 'rb') as handle:
                self.cache.put(requirement, handle)
        finally:
            # Close file descriptor before removing the temporary file.
            # Without closing Windows is complaining that the file cannot
            # be removed because it is used by another process.
            os.close(fd)
            # Cleanup the temporary file.
            os.remove(transformed_file)
        # Get the absolute pathname of the file in the local cache.
        cache_file = self.cache.get(requirement)
        # Enable checksum based cache invalidation.
        self.persist_checksum(requirement, cache_file)
    archive = tarfile.open(cache_file, 'r:gz')
    try:
        for member in archive.getmembers():
            yield member, archive.extractfile(member.name)
    finally:
        archive.close()
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L59-L122
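Because the method yields `(TarInfo, file-like)` pairs instead of a pathname, callers can stream a cached archive straight into an install. A hedged sketch of that hand-off (the function is illustrative; `manager` and `requirement` are assumed to be constructed elsewhere):

```python
def install_from_cache(manager, requirement):
    """Stream a cached (or freshly built) binary distribution into an install.

    `manager` is assumed to be a BinaryDistributionManager and `requirement`
    a pip-accel Requirement object; neither is constructed here.
    """
    members = manager.get_binary_dist(requirement)
    manager.install_binary_dist(members, track_installed_files=True)
```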
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.needs_invalidation** (python)

```python
def needs_invalidation(self, requirement, cache_file):
    """
    Check whether a cached binary distribution needs to be invalidated.

    :param requirement: A :class:`.Requirement` object.
    :param cache_file: The pathname of a cached binary distribution (a string).
    :returns: :data:`True` if the cached binary distribution needs to be
              invalidated, :data:`False` otherwise.
    """
    if self.config.trust_mod_times:
        return requirement.last_modified > os.path.getmtime(cache_file)
    else:
        checksum = self.recall_checksum(cache_file)
        return checksum and checksum != requirement.checksum
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L124-L137
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.recall_checksum** (python)

```python
def recall_checksum(self, cache_file):
    """
    Get the checksum of the input used to generate a binary distribution archive.

    :param cache_file: The pathname of the binary distribution archive (a string).
    :returns: The checksum (a string) or :data:`None` (when no checksum is available).
    """
    # EAFP instead of LBYL because of concurrency between pip-accel
    # processes (https://docs.python.org/2/glossary.html#term-lbyl).
    checksum_file = '%s.txt' % cache_file
    try:
        with open(checksum_file) as handle:
            contents = handle.read()
            return contents.strip()
    except IOError as e:
        if e.errno == errno.ENOENT:
            # Gracefully handle missing checksum files.
            return None
        else:
            # Don't swallow exceptions we don't expect!
            raise
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L139-L159
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.persist_checksum** (python)

```python
def persist_checksum(self, requirement, cache_file):
    """
    Persist the checksum of the input used to generate a binary distribution.

    :param requirement: A :class:`.Requirement` object.
    :param cache_file: The pathname of a cached binary distribution (a string).

    .. note:: The checksum is only calculated and persisted when
              :attr:`~.Config.trust_mod_times` is :data:`False`.
    """
    if not self.config.trust_mod_times:
        checksum_file = '%s.txt' % cache_file
        with AtomicReplace(checksum_file) as temporary_file:
            with open(temporary_file, 'w') as handle:
                handle.write('%s\n' % requirement.checksum)
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L161-L175
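`AtomicReplace` itself isn't shown in this excerpt; the usual shape of such a helper is to write to a temporary file next to the target and rename it into place, so concurrent readers never observe a half-written checksum. A minimal sketch of that pattern (an assumption about the helper, not pip-accel's actual implementation):

```python
import contextlib
import os
import tempfile

@contextlib.contextmanager
def atomic_replace(target):
    """Yield a temporary pathname; on success, rename it over `target`.

    os.replace() is atomic on POSIX, so readers see either the old file
    or the new one, never a partial write.
    """
    fd, temporary = tempfile.mkstemp(dir=os.path.dirname(target) or '.')
    os.close(fd)
    try:
        yield temporary
        os.replace(temporary, target)
    finally:
        # Clean up when the body raised before the rename happened.
        if os.path.exists(temporary):
            os.remove(temporary)
```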
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.build_binary_dist** (python)

```python
def build_binary_dist(self, requirement):
    """
    Build a binary distribution archive from an unpacked source distribution.

    :param requirement: A :class:`.Requirement` object.
    :returns: The pathname of a binary distribution archive (a string).
    :raises: :exc:`.BinaryDistributionError` when the original command
             and the fall back both fail to produce a binary distribution
             archive.

    This method uses the following command to build binary distributions:

    .. code-block:: sh

       $ python setup.py bdist_dumb --format=tar

    This command can fail for two main reasons:

    1. The package is missing binary dependencies.
    2. The ``setup.py`` script doesn't (properly) implement ``bdist_dumb``
       binary distribution format support.

    The first case is dealt with in :func:`get_binary_dist()`. To deal
    with the second case this method falls back to the following command:

    .. code-block:: sh

       $ python setup.py bdist

    This fall back is almost never needed, but there are Python packages
    out there which require this fall back (this method was added because
    the installation of ``Paver==1.2.3`` failed, see `issue 37`_ for
    details about that).

    .. _issue 37: https://github.com/paylogic/pip-accel/issues/37
    """
    try:
        return self.build_binary_dist_helper(requirement, ['bdist_dumb', '--format=tar'])
    except (BuildFailed, NoBuildOutput):
        logger.warning("Build of %s failed, falling back to alternative method ..", requirement)
        return self.build_binary_dist_helper(requirement, ['bdist', '--formats=gztar'])
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L177-L217
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.build_binary_dist_helper** (python)

```python
def build_binary_dist_helper(self, requirement, setup_command):
    """
    Convert an unpacked source distribution to a binary distribution.

    :param requirement: A :class:`.Requirement` object.
    :param setup_command: A list of strings with the arguments to
                          ``setup.py``.
    :returns: The pathname of the resulting binary distribution (a string).
    :raises: :exc:`.BuildFailed` when the build reports an error (e.g.
             because of missing binary dependencies like system
             libraries).
    :raises: :exc:`.NoBuildOutput` when the build does not produce the
             expected binary distribution archive.
    """
    build_timer = Timer()
    # Make sure the source distribution contains a setup script.
    setup_script = os.path.join(requirement.source_directory, 'setup.py')
    if not os.path.isfile(setup_script):
        msg = "Directory %s (%s %s) doesn't contain a source distribution!"
        raise InvalidSourceDistribution(msg % (requirement.source_directory, requirement.name, requirement.version))
    # Let the user know what's going on.
    build_text = "Building %s binary distribution" % requirement
    logger.info("%s ..", build_text)
    # Cleanup previously generated distributions.
    dist_directory = os.path.join(requirement.source_directory, 'dist')
    if os.path.isdir(dist_directory):
        logger.debug("Cleaning up previously generated distributions in %s ..", dist_directory)
        shutil.rmtree(dist_directory)
    # Let the user know (approximately) which command is being executed
    # (I don't think it's necessary to show them the nasty details :-).
    logger.debug("Executing external command: %s",
                 ' '.join(map(pipes.quote, [self.config.python_executable, 'setup.py'] + setup_command)))
    # Compose the command line needed to build the binary distribution.
    # This nasty command line forces the use of setuptools (instead of
    # distutils) just like pip does. This will cause the `*.egg-info'
    # metadata to be written to a directory instead of a file, which
    # (amongst other things) enables tracking of installed files.
    command_line = [
        self.config.python_executable, '-c',
        ';'.join([
            'import setuptools',
            '__file__=%r' % setup_script,
            r"exec(compile(open(__file__).read().replace('\r\n', '\n'), __file__, 'exec'))",
        ])
    ] + setup_command
    # Redirect all output of the build to a temporary file.
    fd, temporary_file = tempfile.mkstemp()
    try:
        # Start the build.
        build = subprocess.Popen(command_line, cwd=requirement.source_directory, stdout=fd, stderr=fd)
        # Wait for the build to finish and provide feedback to the user in the mean time.
        spinner = Spinner(label=build_text, timer=build_timer)
        while build.poll() is None:
            spinner.step()
            # Don't tax the CPU too much.
            time.sleep(0.2)
        spinner.clear()
        # Make sure the build succeeded and produced a binary distribution archive.
        try:
            # If the build reported an error we'll try to provide the user with
            # some hints about what went wrong.
            if build.returncode != 0:
                raise BuildFailed("Failed to build {name} ({version}) binary distribution!",
                                  name=requirement.name, version=requirement.version)
            # Check if the build created the `dist' directory (the os.listdir()
            # call below will raise an exception if we don't check for this).
            if not os.path.isdir(dist_directory):
                raise NoBuildOutput("Build of {name} ({version}) did not produce a binary distribution archive!",
                                    name=requirement.name, version=requirement.version)
            # Check if we can find the binary distribution archive.
            filenames = os.listdir(dist_directory)
            if len(filenames) != 1:
                variables = dict(name=requirement.name,
                                 version=requirement.version,
                                 filenames=concatenate(sorted(filenames)))
                raise NoBuildOutput("""
                    Build of {name} ({version}) produced more than one
                    distribution archive! (matches: {filenames})
                """, **variables)
        except Exception as e:
            # Decorate the exception with the output of the failed build.
            with open(temporary_file) as handle:
                build_output = handle.read()
            enhanced_message = compact("""
                {message}

                Please check the build output because it will probably
                provide a hint about what went wrong.

                Build output:

                {output}
            """, message=e.args[0], output=build_output.strip())
            e.args = (enhanced_message,)
            raise
        logger.info("Finished building %s in %s.", requirement.name, build_timer)
        return os.path.join(dist_directory, filenames[0])
    finally:
        # Close file descriptor before removing the temporary file.
        # Without closing Windows is complaining that the file cannot
        # be removed because it is used by another process.
        os.close(fd)
        os.unlink(temporary_file)
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L219-L321
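The `-c` payload composed above is easier to follow once printed; a standalone sketch that rebuilds the same command line for a hypothetical setup script (`shlex.quote` stands in for the `pipes.quote` used in the code, and the interpreter path is a placeholder):

```python
import shlex

setup_script = '/tmp/example-package/setup.py'  # Hypothetical path.
setup_command = ['bdist_dumb', '--format=tar']
command_line = [
    '/usr/bin/python', '-c',
    ';'.join([
        'import setuptools',
        '__file__=%r' % setup_script,
        r"exec(compile(open(__file__).read().replace('\r\n', '\n'), __file__, 'exec'))",
    ]),
] + setup_command
# Print roughly what would be executed, quoted as a shell command.
print(' '.join(map(shlex.quote, command_line)))
```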
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.transform_binary_dist** (python)

```python
def transform_binary_dist(self, archive_path):
    """
    Transform binary distributions into a form that can be cached for future use.

    :param archive_path: The pathname of the original binary distribution archive.
    :returns: An iterable of tuples with two values each:

              1. A :class:`tarfile.TarInfo` object.
              2. A file-like object.

    This method transforms a binary distribution archive created by
    :func:`build_binary_dist()` into a form that can be cached for future
    use. This comes down to making the pathnames inside the archive
    relative to the `prefix` that the binary distribution was built for.
    """
    # Copy the tar archive file by file so we can rewrite the pathnames.
    logger.debug("Transforming binary distribution: %s.", archive_path)
    archive = tarfile.open(archive_path, 'r')
    for member in archive.getmembers():
        # Some source distribution archives on PyPI that are distributed as ZIP
        # archives contain really weird permissions: the world readable bit is
        # missing. I've encountered this with the httplib2 (0.9) and
        # google-api-python-client (1.2) packages. I assume this is a bug of
        # some kind in the packaging process on "their" side.
        if member.mode & stat.S_IXUSR:
            # If the owner has execute permissions we'll give everyone read and
            # execute permissions (only the owner gets write permissions).
            member.mode = 0o755
        else:
            # If the owner doesn't have execute permissions we'll give everyone
            # read permissions (only the owner gets write permissions).
            member.mode = 0o644
        # In my testing the `dumb' tar files created with the `python
        # setup.py bdist' and `python setup.py bdist_dumb' commands contain
        # pathnames that are relative to `/' in one way or another:
        #
        # - In almost all cases the pathnames look like this:
        #
        #   ./home/peter/.virtualenvs/pip-accel/lib/python2.7/site-packages/pip_accel/__init__.py
        #
        # - After working on pip-accel for several years I encountered
        #   a pathname like this (Python 2.6 on Mac OS X 10.10.5):
        #
        #   Users/peter/.virtualenvs/pip-accel/lib/python2.6/site-packages/pip_accel/__init__.py
        #
        # Both of the above pathnames are relative to `/' but in different
        # ways :-). The following normpath(join('/', ...))) pathname
        # manipulation logic is intended to handle both cases.
        original_pathname = member.name
        absolute_pathname = os.path.normpath(os.path.join('/', original_pathname))
        if member.isdev():
            logger.warn("Ignoring device file: %s.", absolute_pathname)
        elif not member.isdir():
            modified_pathname = os.path.relpath(absolute_pathname, self.config.install_prefix)
            if os.path.isabs(modified_pathname):
                logger.warn("Failed to transform pathname in binary distribution"
                            " to relative path! (original: %r, modified: %r)",
                            original_pathname, modified_pathname)
            else:
                # Rewrite /usr/local to /usr (same goes for all prefixes of course).
                modified_pathname = re.sub('^local/', '', modified_pathname)
                # Rewrite /dist-packages/ to /site-packages/. For details see
                # https://wiki.debian.org/Python#Deviations_from_upstream.
                if self.config.on_debian:
                    modified_pathname = modified_pathname.replace('/dist-packages/', '/site-packages/')
                # Enable operators to debug the transformation process.
                logger.debug("Transformed %r -> %r.", original_pathname, modified_pathname)
                # Get the file data from the input archive.
                handle = archive.extractfile(original_pathname)
                # Yield the modified metadata and a handle to the data.
                member.name = modified_pathname
                yield member, handle
    archive.close()
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L323-L395
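A standalone sketch of the pathname normalization applied to the two example member names quoted in the comments (the install prefix below is taken from the first example):

```python
import os

# Both member names are "relative to /", but in different ways:
examples = [
    './home/peter/.virtualenvs/pip-accel/lib/python2.7/site-packages/pip_accel/__init__.py',
    'Users/peter/.virtualenvs/pip-accel/lib/python2.6/site-packages/pip_accel/__init__.py',
]
for original in examples:
    print(os.path.normpath(os.path.join('/', original)))
# -> /home/peter/.virtualenvs/pip-accel/lib/python2.7/site-packages/pip_accel/__init__.py
# -> /Users/peter/.virtualenvs/pip-accel/lib/python2.6/site-packages/pip_accel/__init__.py

# Once absolute, the member name is made relative to the install prefix:
prefix = '/home/peter/.virtualenvs/pip-accel'
absolute = os.path.normpath(os.path.join('/', examples[0]))
print(os.path.relpath(absolute, prefix))
# -> lib/python2.7/site-packages/pip_accel/__init__.py
```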
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.install_binary_dist** (python)

```python
def install_binary_dist(self, members, virtualenv_compatible=True, prefix=None,
                        python=None, track_installed_files=False):
    """
    Install a binary distribution into the given prefix.

    :param members: An iterable of tuples with two values each:

                    1. A :class:`tarfile.TarInfo` object.
                    2. A file-like object.

    :param prefix: The "prefix" under which the requirements should be
                   installed. This will be a pathname like ``/usr``,
                   ``/usr/local`` or the pathname of a virtual environment.
                   Defaults to :attr:`.Config.install_prefix`.
    :param python: The pathname of the Python executable to use in the shebang
                   line of all executable Python scripts inside the binary
                   distribution. Defaults to :attr:`.Config.python_executable`.
    :param virtualenv_compatible: Whether to enable workarounds to make the
                                  resulting filenames compatible with
                                  virtual environments (defaults to
                                  :data:`True`).
    :param track_installed_files: If this is :data:`True` (not the default for
                                  this method because of backwards
                                  compatibility) pip-accel will create
                                  ``installed-files.txt`` as required by
                                  pip to properly uninstall packages.

    This method installs a binary distribution created by
    :class:`build_binary_dist()` into the given prefix (a directory like
    ``/usr``, ``/usr/local`` or a virtual environment).
    """
    # TODO This is quite slow for modules like Django. Speed it up! Two choices:
    #  1. Run the external tar program to unpack the archive. This will
    #     slightly complicate the fixing up of hashbangs.
    #  2. Using links? The plan: We can maintain a "seed" environment under
    #     $PIP_ACCEL_CACHE and use symbolic and/or hard links to populate other
    #     places based on the "seed" environment.
    module_search_path = set(map(os.path.normpath, sys.path))
    prefix = os.path.normpath(prefix or self.config.install_prefix)
    python = os.path.normpath(python or self.config.python_executable)
    installed_files = []
    for member, from_handle in members:
        pathname = member.name
        if virtualenv_compatible:
            # Some binary distributions include C header files (see for example
            # the greenlet package) however the subdirectory of include/ in a
            # virtual environment is a symbolic link to a subdirectory of
            # /usr/include/ so we should never try to install C header files
            # inside the directory pointed to by the symbolic link. Instead we
            # implement the same workaround that pip uses to avoid this
            # problem.
            pathname = re.sub('^include/', 'include/site/', pathname)
        if self.config.on_debian and '/site-packages/' in pathname:
            # On Debian based system wide Python installs the /site-packages/
            # directory is not in Python's module search path while
            # /dist-packages/ is. We try to be compatible with this.
            match = re.match('^(.+?)/site-packages', pathname)
            if match:
                site_packages = os.path.normpath(os.path.join(prefix, match.group(0)))
                dist_packages = os.path.normpath(os.path.join(prefix, match.group(1), 'dist-packages'))
                if dist_packages in module_search_path and site_packages not in module_search_path:
                    pathname = pathname.replace('/site-packages/', '/dist-packages/')
        pathname = os.path.join(prefix, pathname)
        if track_installed_files:
            # Track the installed file's absolute pathname.
            installed_files.append(pathname)
        directory = os.path.dirname(pathname)
        if not os.path.isdir(directory):
            logger.debug("Creating directory: %s ..", directory)
            makedirs(directory)
        logger.debug("Creating file: %s ..", pathname)
        with open(pathname, 'wb') as to_handle:
            contents = from_handle.read()
            if contents.startswith(b'#!/'):
                contents = self.fix_hashbang(contents, python)
            to_handle.write(contents)
        os.chmod(pathname, member.mode)
    if track_installed_files:
        self.update_installed_files(installed_files)
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L397-L474
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.fix_hashbang** (python)

```python
def fix_hashbang(self, contents, python):
    """
    Rewrite hashbangs_ to use the correct Python executable.

    :param contents: The contents of the script whose hashbang should be
                     fixed (a string).
    :param python: The absolute pathname of the Python executable (a
                   string).
    :returns: The modified contents of the script (a string).

    .. _hashbangs: http://en.wikipedia.org/wiki/Shebang_(Unix)
    """
    lines = contents.splitlines()
    if lines:
        hashbang = lines[0]
        # Get the base name of the command in the hashbang.
        executable = os.path.basename(hashbang)
        # Deal with hashbangs like `#!/usr/bin/env python'.
        executable = re.sub(b'^env ', b'', executable)
        # Only rewrite hashbangs that actually involve Python.
        if re.match(b'^python(\\d+(\\.\\d+)*)?$', executable):
            lines[0] = b'#!' + python.encode('ascii')
            logger.debug("Rewriting hashbang %r to %r!", hashbang, lines[0])
            contents = b'\n'.join(lines)
    return contents
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L476-L500
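A quick standalone demonstration of the rewrite using the same regular expressions (the re-implementation and the target interpreter path are illustrative):

```python
import os
import re

def rewrite_hashbang(contents, python):
    """Minimal restatement of fix_hashbang() for demonstration purposes."""
    lines = contents.splitlines()
    if lines:
        executable = os.path.basename(lines[0])         # b'env python2.7'
        executable = re.sub(b'^env ', b'', executable)  # b'python2.7'
        if re.match(b'^python(\\d+(\\.\\d+)*)?$', executable):
            lines[0] = b'#!' + python.encode('ascii')
            contents = b'\n'.join(lines)
    return contents

script = b'#!/usr/bin/env python2.7\nprint("hello")\n'
print(rewrite_hashbang(script, '/opt/venv/bin/python'))
# -> b'#!/opt/venv/bin/python\nprint("hello")'
```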
**paylogic/pip-accel | pip_accel/bdist.py | BinaryDistributionManager.update_installed_files** (python)

```python
def update_installed_files(self, installed_files):
    """
    Track the files installed by a package so pip knows how to remove the package.

    This method is used by :func:`install_binary_dist()` (which collects
    the list of installed files for :func:`update_installed_files()`).

    :param installed_files: A list of absolute pathnames (strings) with the
                            files that were just installed.
    """
    # Find the *.egg-info directory where installed-files.txt should be created.
    pkg_info_files = [fn for fn in installed_files if fnmatch.fnmatch(fn, '*.egg-info/PKG-INFO')]
    # I'm not (yet) sure how reliable the above logic is, so for now
    # I'll err on the side of caution and only act when the results
    # seem to be reliable.
    if len(pkg_info_files) != 1:
        logger.warning("Not tracking installed files (couldn't reliably determine *.egg-info directory)")
    else:
        egg_info_directory = os.path.dirname(pkg_info_files[0])
        installed_files_path = os.path.join(egg_info_directory, 'installed-files.txt')
        logger.debug("Tracking installed files in %s ..", installed_files_path)
        with open(installed_files_path, 'w') as handle:
            for pathname in installed_files:
                handle.write('%s\n' % os.path.relpath(pathname, egg_info_directory))
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/bdist.py#L502-L525
**paylogic/pip-accel | pip_accel/config.py | Config.available_configuration_files** (python)

```python
def available_configuration_files(self):
    """A list of strings with the absolute pathnames of the available configuration files."""
    known_files = [GLOBAL_CONFIG, LOCAL_CONFIG, self.environment.get('PIP_ACCEL_CONFIG')]
    absolute_paths = [parse_path(pathname) for pathname in known_files if pathname]
    return [pathname for pathname in absolute_paths if os.path.isfile(pathname)]
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L117-L121
**paylogic/pip-accel | pip_accel/config.py | Config.load_configuration_file** (python)

```python
def load_configuration_file(self, configuration_file):
    """
    Load configuration defaults from a configuration file.

    :param configuration_file: The pathname of a configuration file (a
                               string).
    :raises: :exc:`Exception` when the configuration file cannot be
             loaded.
    """
    configuration_file = parse_path(configuration_file)
    logger.debug("Loading configuration file: %s", configuration_file)
    parser = configparser.RawConfigParser()
    files_loaded = parser.read(configuration_file)
    if len(files_loaded) != 1:
        msg = "Failed to load configuration file! (%s)"
        raise Exception(msg % configuration_file)
    elif not parser.has_section('pip-accel'):
        msg = "Missing 'pip-accel' section in configuration file! (%s)"
        raise Exception(msg % configuration_file)
    else:
        self.configuration.update(parser.items('pip-accel'))
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L123-L143
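For reference, a file that satisfies this loader only needs a `[pip-accel]` section; a hypothetical example, exercised through the same `RawConfigParser` (only the `data-directory` option is confirmed elsewhere in this excerpt):

```python
import configparser

# Hypothetical minimal configuration file contents.
example = """
[pip-accel]
data-directory = /var/cache/pip-accel
"""

parser = configparser.RawConfigParser()
parser.read_string(example)
print(dict(parser.items('pip-accel')))
# -> {'data-directory': '/var/cache/pip-accel'}
```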
**paylogic/pip-accel | pip_accel/config.py | Config.get** (python)

```python
def get(self, property_name=None, environment_variable=None, configuration_option=None, default=None):
    """
    Internal shortcut to get a configuration option's value.

    :param property_name: The name of the property that users can set on
                          the :class:`Config` class (a string).
    :param environment_variable: The name of the environment variable (a
                                 string).
    :param configuration_option: The name of the option in the
                                 configuration file (a string).
    :param default: The default value.
    :returns: The value of the environment variable or configuration file
              option or the default value.
    """
    if self.overrides.get(property_name) is not None:
        return self.overrides[property_name]
    elif environment_variable and self.environment.get(environment_variable):
        return self.environment[environment_variable]
    elif self.configuration.get(configuration_option) is not None:
        return self.configuration[configuration_option]
    else:
        return default
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L158-L179
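The precedence implemented above is: explicit override, then environment variable, then configuration file option, then the default. A standalone restatement for illustration (the dictionaries stand in for `self.overrides`, `self.environment` and `self.configuration`):

```python
def resolve(overrides, environment, configuration, property_name=None,
            environment_variable=None, configuration_option=None, default=None):
    """Override > environment > configuration file > default."""
    if overrides.get(property_name) is not None:
        return overrides[property_name]
    if environment_variable and environment.get(environment_variable):
        return environment[environment_variable]
    if configuration.get(configuration_option) is not None:
        return configuration[configuration_option]
    return default

print(resolve({}, {'PIP_ACCEL_CACHE': '/tmp/cache'}, {},
              property_name='data_directory',
              environment_variable='PIP_ACCEL_CACHE',
              configuration_option='data-directory',
              default='~/.pip-accel'))
# -> '/tmp/cache' (the environment variable beats the default)
```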
**paylogic/pip-accel | pip_accel/config.py | Config.source_index** (python)

```python
def source_index(self):
    """
    The absolute pathname of pip-accel's source index directory (a string).

    This is the ``sources`` subdirectory of :data:`data_directory`.
    """
    return self.get(property_name='source_index',
                    default=os.path.join(self.data_directory, 'sources'))
```

Source: https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L194-L201
paylogic/pip-accel | pip_accel/config.py | Config.data_directory | def data_directory(self):
"""
The absolute pathname of the directory where pip-accel's data files are stored (a string).
- Environment variable: ``$PIP_ACCEL_CACHE``
- Configuration option: ``data-directory``
- Default: ``/var/cache/pip-accel`` if running as ``root``, ``~/.pip-accel`` otherwise
"""
return expand_path(self.get(property_name='data_directory',
environment_variable='PIP_ACCEL_CACHE',
configuration_option='data-directory',
default='/var/cache/pip-accel' if is_root() else '~/.pip-accel')) | python | def data_directory(self):
"""
The absolute pathname of the directory where pip-accel's data files are stored (a string).
- Environment variable: ``$PIP_ACCEL_CACHE``
- Configuration option: ``data-directory``
- Default: ``/var/cache/pip-accel`` if running as ``root``, ``~/.pip-accel`` otherwise
"""
return expand_path(self.get(property_name='data_directory',
environment_variable='PIP_ACCEL_CACHE',
configuration_option='data-directory',
default='/var/cache/pip-accel' if is_root() else '~/.pip-accel')) | The absolute pathname of the directory where pip-accel's data files are stored (a string).
- Environment variable: ``$PIP_ACCEL_CACHE``
- Configuration option: ``data-directory``
- Default: ``/var/cache/pip-accel`` if running as ``root``, ``~/.pip-accel`` otherwise | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L226-L237 |
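The configuration file behind options like data-directory is a plain ini file with a [pip-accel] section, as load_configuration_file() shows above. A minimal sketch of the format, parsed with the same RawConfigParser class (read_string() stands in for read() so the demo needs no file on disk; the option values are made up):

import configparser

ini_snippet = """
[pip-accel]
data-directory = /srv/pip-accel
auto-install = yes
"""

parser = configparser.RawConfigParser()
parser.read_string(ini_snippet)
print(dict(parser.items('pip-accel')))
# -> {'data-directory': '/srv/pip-accel', 'auto-install': 'yes'}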
paylogic/pip-accel | pip_accel/config.py | Config.install_prefix | def install_prefix(self):
"""
The absolute pathname of the installation prefix to use (a string).
This property is based on :data:`sys.prefix` except that when
:data:`sys.prefix` is ``/usr`` and we're running on a Debian derived
system ``/usr/local`` is used instead.
The reason for this is that on Debian derived systems only apt (dpkg)
should be allowed to touch files in ``/usr/lib/pythonX.Y/dist-packages``
and ``python setup.py install`` knows this (see the ``posix_local``
installation scheme in ``/usr/lib/pythonX.Y/sysconfig.py`` on Debian
derived systems). Because pip-accel replaces ``python setup.py
install`` it has to replicate this logic. Inferring all of this from
the :mod:`sysconfig` module would be nice but that module wasn't
available in Python 2.6.
"""
return self.get(property_name='install_prefix',
default='/usr/local' if sys.prefix == '/usr' and self.on_debian else sys.prefix) | python | def install_prefix(self):
"""
The absolute pathname of the installation prefix to use (a string).
This property is based on :data:`sys.prefix` except that when
:data:`sys.prefix` is ``/usr`` and we're running on a Debian derived
system ``/usr/local`` is used instead.
The reason for this is that on Debian derived systems only apt (dpkg)
should be allowed to touch files in ``/usr/lib/pythonX.Y/dist-packages``
and ``python setup.py install`` knows this (see the ``posix_local``
installation scheme in ``/usr/lib/pythonX.Y/sysconfig.py`` on Debian
derived systems). Because pip-accel replaces ``python setup.py
install`` it has to replicate this logic. Inferring all of this from
the :mod:`sysconfig` module would be nice but that module wasn't
available in Python 2.6.
"""
return self.get(property_name='install_prefix',
default='/usr/local' if sys.prefix == '/usr' and self.on_debian else sys.prefix) | The absolute pathname of the installation prefix to use (a string).
This property is based on :data:`sys.prefix` except that when
:data:`sys.prefix` is ``/usr`` and we're running on a Debian derived
system ``/usr/local`` is used instead.
The reason for this is that on Debian derived systems only apt (dpkg)
should be allowed to touch files in ``/usr/lib/pythonX.Y/dist-packages``
and ``python setup.py install`` knows this (see the ``posix_local``
installation scheme in ``/usr/lib/pythonX.Y/sysconfig.py`` on Debian
derived systems). Because pip-accel replaces ``python setup.py
install`` it has to replicate this logic. Inferring all of this from
the :mod:`sysconfig` module would be nice but that module wasn't
available in Python 2.6. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L246-L264 |
paylogic/pip-accel | pip_accel/config.py | Config.python_executable | def python_executable(self):
"""The absolute pathname of the Python executable (a string)."""
return self.get(property_name='python_executable',
default=sys.executable or os.path.join(self.install_prefix, 'bin', 'python')) | python | def python_executable(self):
"""The absolute pathname of the Python executable (a string)."""
return self.get(property_name='python_executable',
default=sys.executable or os.path.join(self.install_prefix, 'bin', 'python')) | The absolute pathname of the Python executable (a string). | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L267-L270 |
paylogic/pip-accel | pip_accel/config.py | Config.auto_install | def auto_install(self):
"""
Whether automatic installation of missing system packages is enabled.
:data:`True` if automatic installation of missing system packages is
enabled, :data:`False` if it is disabled, :data:`None` otherwise (in this case
the user will be prompted at the appropriate time).
- Environment variable: ``$PIP_ACCEL_AUTO_INSTALL`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``auto-install`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`None`
"""
value = self.get(property_name='auto_install',
environment_variable='PIP_ACCEL_AUTO_INSTALL',
configuration_option='auto-install')
if value is not None:
return coerce_boolean(value) | python | def auto_install(self):
"""
Whether automatic installation of missing system packages is enabled.
:data:`True` if automatic installation of missing system packages is
enabled, :data:`False` if it is disabled, :data:`None` otherwise (in this case
the user will be prompted at the appropriate time).
- Environment variable: ``$PIP_ACCEL_AUTO_INSTALL`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``auto-install`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`None`
"""
value = self.get(property_name='auto_install',
environment_variable='PIP_ACCEL_AUTO_INSTALL',
configuration_option='auto-install')
if value is not None:
return coerce_boolean(value) | Whether automatic installation of missing system packages is enabled.
:data:`True` if automatic installation of missing system packages is
enabled, :data:`False` if it is disabled, :data:`None` otherwise (in this case
the user will be prompted at the appropriate time).
- Environment variable: ``$PIP_ACCEL_AUTO_INSTALL`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``auto-install`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`None` | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L273-L292 |
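The tri-state result of auto_install (True, False or None) comes from applying coerce_boolean() only when a value was actually configured. A small sketch assuming the humanfriendly package is installed (pip-accel depends on it); the auto_install_from() name is hypothetical:

from humanfriendly import coerce_boolean

def auto_install_from(value):
    # None means "not configured": the caller should prompt the user.
    return coerce_boolean(value) if value is not None else None

print(auto_install_from('yes'))  # True
print(auto_install_from('0'))    # False
print(auto_install_from(None))   # None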
paylogic/pip-accel | pip_accel/config.py | Config.trust_mod_times | def trust_mod_times(self):
"""
Whether to trust file modification times for cache invalidation.
- Environment variable: ``$PIP_ACCEL_TRUST_MOD_TIMES``
- Configuration option: ``trust-mod-times``
- Default: :data:`True` unless the AppVeyor_ continuous integration
environment is detected (see `issue 62`_).
.. _AppVeyor: http://www.appveyor.com
.. _issue 62: https://github.com/paylogic/pip-accel/issues/62
"""
on_appveyor = coerce_boolean(os.environ.get('APPVEYOR', 'False'))
return coerce_boolean(self.get(property_name='trust_mod_times',
environment_variable='PIP_ACCEL_TRUST_MOD_TIMES',
configuration_option='trust-mod-times',
default=(not on_appveyor))) | python | def trust_mod_times(self):
"""
Whether to trust file modification times for cache invalidation.
- Environment variable: ``$PIP_ACCEL_TRUST_MOD_TIMES``
- Configuration option: ``trust-mod-times``
- Default: :data:`True` unless the AppVeyor_ continuous integration
environment is detected (see `issue 62`_).
.. _AppVeyor: http://www.appveyor.com
.. _issue 62: https://github.com/paylogic/pip-accel/issues/62
"""
on_appveyor = coerce_boolean(os.environ.get('APPVEYOR', 'False'))
return coerce_boolean(self.get(property_name='trust_mod_times',
environment_variable='PIP_ACCEL_TRUST_MOD_TIMES',
configuration_option='trust-mod-times',
default=(not on_appveyor))) | Whether to trust file modification times for cache invalidation.
- Environment variable: ``$PIP_ACCEL_TRUST_MOD_TIMES``
- Configuration option: ``trust-mod-times``
- Default: :data:`True` unless the AppVeyor_ continuous integration
environment is detected (see `issue 62`_).
.. _AppVeyor: http://www.appveyor.com
.. _issue 62: https://github.com/paylogic/pip-accel/issues/62 | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L342-L358 |
paylogic/pip-accel | pip_accel/config.py | Config.s3_cache_readonly | def s3_cache_readonly(self):
"""
Whether the Amazon S3 bucket is considered read only.
If this is :data:`True` then the Amazon S3 bucket will only be used for
:class:`~pip_accel.caches.s3.S3CacheBackend.get()` operations (all
:class:`~pip_accel.caches.s3.S3CacheBackend.put()` operations will
be disabled).
- Environment variable: ``$PIP_ACCEL_S3_READONLY`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``s3-readonly`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`False`
For details please refer to the :mod:`pip_accel.caches.s3` module.
"""
return coerce_boolean(self.get(property_name='s3_cache_readonly',
environment_variable='PIP_ACCEL_S3_READONLY',
configuration_option='s3-readonly',
default=False)) | python | def s3_cache_readonly(self):
"""
Whether the Amazon S3 bucket is considered read only.
If this is :data:`True` then the Amazon S3 bucket will only be used for
:class:`~pip_accel.caches.s3.S3CacheBackend.get()` operations (all
:class:`~pip_accel.caches.s3.S3CacheBackend.put()` operations will
be disabled).
- Environment variable: ``$PIP_ACCEL_S3_READONLY`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``s3-readonly`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`False`
For details please refer to the :mod:`pip_accel.caches.s3` module.
"""
return coerce_boolean(self.get(property_name='s3_cache_readonly',
environment_variable='PIP_ACCEL_S3_READONLY',
configuration_option='s3-readonly',
default=False)) | Whether the Amazon S3 bucket is considered read only.
If this is :data:`True` then the Amazon S3 bucket will only be used for
:class:`~pip_accel.caches.s3.S3CacheBackend.get()` operations (all
:class:`~pip_accel.caches.s3.S3CacheBackend.put()` operations will
be disabled).
- Environment variable: ``$PIP_ACCEL_S3_READONLY`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``s3-readonly`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`False`
For details please refer to the :mod:`pip_accel.caches.s3` module. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L427-L448 |
paylogic/pip-accel | pip_accel/config.py | Config.s3_cache_timeout | def s3_cache_timeout(self):
"""
The socket timeout in seconds for connections to Amazon S3 (an integer).
This value is injected into Boto's configuration to override the
default socket timeout used for connections to Amazon S3.
- Environment variable: ``$PIP_ACCEL_S3_TIMEOUT``
- Configuration option: ``s3-timeout``
- Default: ``60`` (`Boto's default`_)
.. _Boto's default: http://boto.readthedocs.org/en/latest/boto_config_tut.html
"""
value = self.get(property_name='s3_cache_timeout',
environment_variable='PIP_ACCEL_S3_TIMEOUT',
configuration_option='s3-timeout')
try:
    n = int(value)
except (TypeError, ValueError):
    return 60
return n if n >= 0 else 60 | python | def s3_cache_timeout(self):
"""
The socket timeout in seconds for connections to Amazon S3 (an integer).
This value is injected into Boto's configuration to override the
default socket timeout used for connections to Amazon S3.
- Environment variable: ``$PIP_ACCEL_S3_TIMEOUT``
- Configuration option: ``s3-timeout``
- Default: ``60`` (`Boto's default`_)
.. _Boto's default: http://boto.readthedocs.org/en/latest/boto_config_tut.html
"""
value = self.get(property_name='s3_cache_timeout',
environment_variable='PIP_ACCEL_S3_TIMEOUT',
configuration_option='s3-timeout')
try:
    n = int(value)
except (TypeError, ValueError):
    return 60
return n if n >= 0 else 60 | The socket timeout in seconds for connections to Amazon S3 (an integer).
This value is injected into Boto's configuration to override the
default socket timeout used for connections to Amazon S3.
- Environment variable: ``$PIP_ACCEL_S3_TIMEOUT``
- Configuration option: ``s3-timeout``
- Default: ``60`` (`Boto's default`_)
.. _Boto's default: http://boto.readthedocs.org/en/latest/boto_config_tut.html | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/config.py#L451-L472 |
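The validation in s3_cache_timeout can be isolated into a helper: accept only non-negative integers and fall back to Boto's default of 60 for missing, malformed or negative values. A sketch, with parse_timeout() as a hypothetical name:

def parse_timeout(value, default=60):
    # Anything that isn't a non-negative integer falls back to the default.
    try:
        n = int(value)
    except (TypeError, ValueError):
        return default
    return n if n >= 0 else default

assert parse_timeout('30') == 30
assert parse_timeout(None) == 60     # option not configured
assert parse_timeout('bogus') == 60  # malformed value
assert parse_timeout('-5') == 60     # negative values are rejected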
paylogic/pip-accel | pip_accel/caches/s3.py | S3CacheBackend.get | def get(self, filename):
"""
Download a distribution archive from the configured Amazon S3 bucket.
:param filename: The filename of the distribution archive (a string).
:returns: The pathname of a distribution archive on the local file
system or :data:`None`.
:raises: :exc:`.CacheBackendError` when any underlying method fails.
"""
timer = Timer()
self.check_prerequisites()
with PatchedBotoConfig():
# Check if the distribution archive is available.
raw_key = self.get_cache_key(filename)
logger.info("Checking if distribution archive is available in S3 bucket: %s", raw_key)
key = self.s3_bucket.get_key(raw_key)
if key is None:
logger.debug("Distribution archive is not available in S3 bucket.")
else:
# Download the distribution archive to the local binary index.
# TODO Shouldn't this use LocalCacheBackend.put() instead of
# implementing the same steps manually?!
logger.info("Downloading distribution archive from S3 bucket ..")
file_in_cache = os.path.join(self.config.binary_cache, filename)
makedirs(os.path.dirname(file_in_cache))
with AtomicReplace(file_in_cache) as temporary_file:
key.get_contents_to_filename(temporary_file)
logger.debug("Finished downloading distribution archive from S3 bucket in %s.", timer)
return file_in_cache | python | def get(self, filename):
"""
Download a distribution archive from the configured Amazon S3 bucket.
:param filename: The filename of the distribution archive (a string).
:returns: The pathname of a distribution archive on the local file
system or :data:`None`.
:raises: :exc:`.CacheBackendError` when any underlying method fails.
"""
timer = Timer()
self.check_prerequisites()
with PatchedBotoConfig():
# Check if the distribution archive is available.
raw_key = self.get_cache_key(filename)
logger.info("Checking if distribution archive is available in S3 bucket: %s", raw_key)
key = self.s3_bucket.get_key(raw_key)
if key is None:
logger.debug("Distribution archive is not available in S3 bucket.")
else:
# Download the distribution archive to the local binary index.
# TODO Shouldn't this use LocalCacheBackend.put() instead of
# implementing the same steps manually?!
logger.info("Downloading distribution archive from S3 bucket ..")
file_in_cache = os.path.join(self.config.binary_cache, filename)
makedirs(os.path.dirname(file_in_cache))
with AtomicReplace(file_in_cache) as temporary_file:
key.get_contents_to_filename(temporary_file)
logger.debug("Finished downloading distribution archive from S3 bucket in %s.", timer)
return file_in_cache | Download a distribution archive from the configured Amazon S3 bucket.
:param filename: The filename of the distribution archive (a string).
:returns: The pathname of a distribution archive on the local file
system or :data:`None`.
:raises: :exc:`.CacheBackendError` when any underlying method fails. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/s3.py#L143-L171 |
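AtomicReplace is pip-accel's own helper; the pattern underneath is to download into a temporary file in the destination directory and then rename it over the final name, so readers never observe a partially written archive. A standard-library sketch of the same idea, assuming a downloader callable such as key.get_contents_to_filename (atomic_download() is a hypothetical name):

import os
import tempfile

def atomic_download(downloader, destination):
    directory = os.path.dirname(destination) or '.'
    fd, temporary_file = tempfile.mkstemp(dir=directory, suffix='.part')
    os.close(fd)
    try:
        downloader(temporary_file)
        # os.replace() is atomic when source and target share a file system.
        os.replace(temporary_file, destination)
    except BaseException:
        os.unlink(temporary_file)
        raise

# Usage: atomic_download(key.get_contents_to_filename, file_in_cache)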
paylogic/pip-accel | pip_accel/caches/s3.py | S3CacheBackend.put | def put(self, filename, handle):
"""
Upload a distribution archive to the configured Amazon S3 bucket.
If the :attr:`~.Config.s3_cache_readonly` configuration option is
enabled this method does nothing.
:param filename: The filename of the distribution archive (a string).
:param handle: A file-like object that provides access to the
distribution archive.
:raises: :exc:`.CacheBackendError` when any underlying method fails.
"""
if self.config.s3_cache_readonly:
logger.info('Skipping upload to S3 bucket (using S3 in read only mode).')
else:
timer = Timer()
self.check_prerequisites()
with PatchedBotoConfig():
from boto.s3.key import Key
raw_key = self.get_cache_key(filename)
logger.info("Uploading distribution archive to S3 bucket: %s", raw_key)
key = Key(self.s3_bucket)
key.key = raw_key
try:
key.set_contents_from_file(handle)
except Exception as e:
logger.info("Encountered error writing to S3 bucket, "
"falling back to read only mode (exception: %s)", e)
self.config.s3_cache_readonly = True
else:
logger.info("Finished uploading distribution archive to S3 bucket in %s.", timer) | python | def put(self, filename, handle):
"""
Upload a distribution archive to the configured Amazon S3 bucket.
If the :attr:`~.Config.s3_cache_readonly` configuration option is
enabled this method does nothing.
:param filename: The filename of the distribution archive (a string).
:param handle: A file-like object that provides access to the
distribution archive.
:raises: :exc:`.CacheBackendError` when any underlying method fails.
"""
if self.config.s3_cache_readonly:
logger.info('Skipping upload to S3 bucket (using S3 in read only mode).')
else:
timer = Timer()
self.check_prerequisites()
with PatchedBotoConfig():
from boto.s3.key import Key
raw_key = self.get_cache_key(filename)
logger.info("Uploading distribution archive to S3 bucket: %s", raw_key)
key = Key(self.s3_bucket)
key.key = raw_key
try:
key.set_contents_from_file(handle)
except Exception as e:
logger.info("Encountered error writing to S3 bucket, "
"falling back to read only mode (exception: %s)", e)
self.config.s3_cache_readonly = True
else:
logger.info("Finished uploading distribution archive to S3 bucket in %s.", timer) | Upload a distribution archive to the configured Amazon S3 bucket.
If the :attr:`~.Config.s3_cache_readonly` configuration option is
enabled this method does nothing.
:param filename: The filename of the distribution archive (a string).
:param handle: A file-like object that provides access to the
distribution archive.
:raises: :exc:`.CacheBackendError` when any underlying method fails. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/s3.py#L173-L203 |
paylogic/pip-accel | pip_accel/caches/s3.py | S3CacheBackend.s3_bucket | def s3_bucket(self):
"""
Connect to the user defined Amazon S3 bucket.
Called on demand by :func:`get()` and :func:`put()`. Caches its
return value so that only a single connection is created.
:returns: A :class:`boto.s3.bucket.Bucket` object.
:raises: :exc:`.CacheBackendDisabledError` when the user hasn't
defined :attr:`.Config.s3_cache_bucket`.
:raises: :exc:`.CacheBackendError` when the connection to the Amazon
S3 bucket fails.
"""
if not hasattr(self, 'cached_bucket'):
self.check_prerequisites()
with PatchedBotoConfig():
from boto.exception import BotoClientError, BotoServerError, S3ResponseError
# The following try/except block translates unexpected exceptions
# raised by Boto into a CacheBackendError exception.
try:
# The following try/except block handles the expected exception
# raised by Boto when an Amazon S3 bucket does not exist.
try:
logger.debug("Connecting to Amazon S3 bucket: %s", self.config.s3_cache_bucket)
self.cached_bucket = self.s3_connection.get_bucket(self.config.s3_cache_bucket)
except S3ResponseError as e:
if e.status == 404 and self.config.s3_cache_create_bucket:
logger.info("Amazon S3 bucket doesn't exist yet, creating it now: %s",
self.config.s3_cache_bucket)
self.s3_connection.create_bucket(self.config.s3_cache_bucket)
self.cached_bucket = self.s3_connection.get_bucket(self.config.s3_cache_bucket)
else:
# Don't swallow exceptions we can't handle.
raise
except (BotoClientError, BotoServerError):
raise CacheBackendError("""
Failed to connect to the configured Amazon S3 bucket
{bucket}! Are you sure the bucket exists and is accessible
using the provided credentials? The Amazon S3 cache backend
will be disabled for now.
""", bucket=repr(self.config.s3_cache_bucket))
return self.cached_bucket | python | def s3_bucket(self):
"""
Connect to the user defined Amazon S3 bucket.
Called on demand by :func:`get()` and :func:`put()`. Caches its
return value so that only a single connection is created.
:returns: A :class:`boto.s3.bucket.Bucket` object.
:raises: :exc:`.CacheBackendDisabledError` when the user hasn't
defined :attr:`.Config.s3_cache_bucket`.
:raises: :exc:`.CacheBackendError` when the connection to the Amazon
S3 bucket fails.
"""
if not hasattr(self, 'cached_bucket'):
self.check_prerequisites()
with PatchedBotoConfig():
from boto.exception import BotoClientError, BotoServerError, S3ResponseError
# The following try/except block translates unexpected exceptions
# raised by Boto into a CacheBackendError exception.
try:
# The following try/except block handles the expected exception
# raised by Boto when an Amazon S3 bucket does not exist.
try:
logger.debug("Connecting to Amazon S3 bucket: %s", self.config.s3_cache_bucket)
self.cached_bucket = self.s3_connection.get_bucket(self.config.s3_cache_bucket)
except S3ResponseError as e:
if e.status == 404 and self.config.s3_cache_create_bucket:
logger.info("Amazon S3 bucket doesn't exist yet, creating it now: %s",
self.config.s3_cache_bucket)
self.s3_connection.create_bucket(self.config.s3_cache_bucket)
self.cached_bucket = self.s3_connection.get_bucket(self.config.s3_cache_bucket)
else:
# Don't swallow exceptions we can't handle.
raise
except (BotoClientError, BotoServerError):
raise CacheBackendError("""
Failed to connect to the configured Amazon S3 bucket
{bucket}! Are you sure the bucket exists and is accessible
using the provided credentials? The Amazon S3 cache backend
will be disabled for now.
""", bucket=repr(self.config.s3_cache_bucket))
return self.cached_bucket | Connect to the user defined Amazon S3 bucket.
Called on demand by :func:`get()` and :func:`put()`. Caches its
return value so that only a single connection is created.
:returns: A :class:`boto.s3.bucket.Bucket` object.
:raises: :exc:`.CacheBackendDisabledError` when the user hasn't
defined :attr:`.Config.s3_cache_bucket`.
:raises: :exc:`.CacheBackendError` when the connection to the Amazon
S3 bucket fails. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/s3.py#L206-L247 |
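Both s3_bucket and s3_connection (below) use the same hand-rolled memoization idiom: compute on first access, store the result as an instance attribute and return the cached object ever after. A minimal sketch of the pattern, with a plain object() standing in for the expensive connection:

class Client(object):

    @property
    def connection(self):
        # The expensive setup only runs on the first access.
        if not hasattr(self, 'cached_connection'):
            print("connecting ..")
            self.cached_connection = object()  # stand-in for S3Connection(...)
        return self.cached_connection

client = Client()
assert client.connection is client.connection  # "connecting .." printed once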
paylogic/pip-accel | pip_accel/caches/s3.py | S3CacheBackend.s3_connection | def s3_connection(self):
"""
Connect to the Amazon S3 API.
If the connection attempt fails because Boto can't find credentials the
attempt is retried once with an anonymous connection.
Called on demand by :attr:`s3_bucket`.
:returns: A :class:`boto.s3.connection.S3Connection` object.
:raises: :exc:`.CacheBackendError` when the connection to the Amazon
S3 API fails.
"""
if not hasattr(self, 'cached_connection'):
self.check_prerequisites()
with PatchedBotoConfig():
import boto
from boto.exception import BotoClientError, BotoServerError, NoAuthHandlerFound
from boto.s3.connection import S3Connection, SubdomainCallingFormat, OrdinaryCallingFormat
try:
# Configure the number of retries and the socket timeout used
# by Boto. Based on the snippet given in the following email:
# https://groups.google.com/d/msg/boto-users/0osmP0cUl5Y/X4NdlMGWKiEJ
if not boto.config.has_section(BOTO_CONFIG_SECTION):
boto.config.add_section(BOTO_CONFIG_SECTION)
boto.config.set(BOTO_CONFIG_SECTION,
BOTO_CONFIG_NUM_RETRIES_OPTION,
str(self.config.s3_cache_retries))
boto.config.set(BOTO_CONFIG_SECTION,
BOTO_CONFIG_SOCKET_TIMEOUT_OPTION,
str(self.config.s3_cache_timeout))
logger.debug("Connecting to Amazon S3 API ..")
endpoint = urlparse(self.config.s3_cache_url)
host, _, port = endpoint.netloc.partition(':')
kw = dict(
host=host,
port=int(port) if port else None,
is_secure=(endpoint.scheme == 'https'),
calling_format=(SubdomainCallingFormat() if host == S3Connection.DefaultHost
else OrdinaryCallingFormat()),
)
try:
self.cached_connection = S3Connection(**kw)
except NoAuthHandlerFound:
logger.debug("Amazon S3 API credentials missing, retrying with anonymous connection ..")
self.cached_connection = S3Connection(anon=True, **kw)
except (BotoClientError, BotoServerError):
raise CacheBackendError("""
Failed to connect to the Amazon S3 API! Most likely your
credentials are not correctly configured. The Amazon S3
cache backend will be disabled for now.
""")
return self.cached_connection | python | def s3_connection(self):
"""
Connect to the Amazon S3 API.
If the connection attempt fails because Boto can't find credentials the
attempt is retried once with an anonymous connection.
Called on demand by :attr:`s3_bucket`.
:returns: A :class:`boto.s3.connection.S3Connection` object.
:raises: :exc:`.CacheBackendError` when the connection to the Amazon
S3 API fails.
"""
if not hasattr(self, 'cached_connection'):
self.check_prerequisites()
with PatchedBotoConfig():
import boto
from boto.exception import BotoClientError, BotoServerError, NoAuthHandlerFound
from boto.s3.connection import S3Connection, SubdomainCallingFormat, OrdinaryCallingFormat
try:
# Configure the number of retries and the socket timeout used
# by Boto. Based on the snippet given in the following email:
# https://groups.google.com/d/msg/boto-users/0osmP0cUl5Y/X4NdlMGWKiEJ
if not boto.config.has_section(BOTO_CONFIG_SECTION):
boto.config.add_section(BOTO_CONFIG_SECTION)
boto.config.set(BOTO_CONFIG_SECTION,
BOTO_CONFIG_NUM_RETRIES_OPTION,
str(self.config.s3_cache_retries))
boto.config.set(BOTO_CONFIG_SECTION,
BOTO_CONFIG_SOCKET_TIMEOUT_OPTION,
str(self.config.s3_cache_timeout))
logger.debug("Connecting to Amazon S3 API ..")
endpoint = urlparse(self.config.s3_cache_url)
host, _, port = endpoint.netloc.partition(':')
kw = dict(
host=host,
port=int(port) if port else None,
is_secure=(endpoint.scheme == 'https'),
calling_format=(SubdomainCallingFormat() if host == S3Connection.DefaultHost
else OrdinaryCallingFormat()),
)
try:
self.cached_connection = S3Connection(**kw)
except NoAuthHandlerFound:
logger.debug("Amazon S3 API credentials missing, retrying with anonymous connection ..")
self.cached_connection = S3Connection(anon=True, **kw)
except (BotoClientError, BotoServerError):
raise CacheBackendError("""
Failed to connect to the Amazon S3 API! Most likely your
credentials are not correctly configured. The Amazon S3
cache backend will be disabled for now.
""")
return self.cached_connection | Connect to the Amazon S3 API.
If the connection attempt fails because Boto can't find credentials the
attempt is retried once with an anonymous connection.
Called on demand by :attr:`s3_bucket`.
:returns: A :class:`boto.s3.connection.S3Connection` object.
:raises: :exc:`.CacheBackendError` when the connection to the Amazon
S3 API fails. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/s3.py#L250-L302 |
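The endpoint handling near the end of s3_connection() is plain URL decomposition. A standalone sketch of how an s3_cache_url maps onto the host, port and is_secure keyword arguments (endpoint_to_kw() is a hypothetical name; the calling format selection is left out):

from urllib.parse import urlparse

def endpoint_to_kw(s3_cache_url):
    endpoint = urlparse(s3_cache_url)
    host, _, port = endpoint.netloc.partition(':')
    return dict(host=host,
                port=int(port) if port else None,
                is_secure=(endpoint.scheme == 'https'))

print(endpoint_to_kw('https://s3.amazonaws.com'))
# -> {'host': 's3.amazonaws.com', 'port': None, 'is_secure': True}
print(endpoint_to_kw('http://localhost:12345'))
# -> {'host': 'localhost', 'port': 12345, 'is_secure': False}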
paylogic/pip-accel | pip_accel/caches/s3.py | PatchedBotoConfig.get | def get(self, section, name, default=None, **kw):
"""Replacement for :func:`boto.pyami.config.Config.get()`."""
try:
return self.unbound_method(self.instance, section, name, **kw)
except Exception:
return default | python | def get(self, section, name, default=None, **kw):
"""Replacement for :func:`boto.pyami.config.Config.get()`."""
try:
return self.unbound_method(self.instance, section, name, **kw)
except Exception:
return default | Replacement for :func:`boto.pyami.config.Config.get()`. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/s3.py#L367-L372 |
paylogic/pip-accel | pip_accel/req.py | Requirement.related_archives | def related_archives(self):
"""
The pathnames of the source distribution(s) for this requirement (a list of strings).
.. note:: This property is very new in pip-accel and its logic may need
some time to mature. For now any misbehavior by this property
shouldn't be too much of a problem because the pathnames
reported by this property are only used for cache
invalidation (see the :attr:`last_modified` and
:attr:`checksum` properties).
"""
# Escape the requirement's name for use in a regular expression.
name_pattern = escape_name(self.name)
# Escape the requirement's version for use in a regular expression.
version_pattern = re.escape(self.version)
# Create a regular expression that matches any of the known source
# distribution archive extensions.
extension_pattern = '|'.join(re.escape(ext) for ext in ARCHIVE_EXTENSIONS if ext != '.whl')
# Compose the regular expression pattern to match filenames of source
# distribution archives in the local source index directory.
pattern = '^%s-%s(%s)$' % (name_pattern, version_pattern, extension_pattern)
# Compile the regular expression for case insensitive matching.
compiled_pattern = re.compile(pattern, re.IGNORECASE)
# Find the matching source distribution archives.
return [os.path.join(self.config.source_index, fn)
for fn in os.listdir(self.config.source_index)
if compiled_pattern.match(fn)] | python | def related_archives(self):
"""
The pathnames of the source distribution(s) for this requirement (a list of strings).
.. note:: This property is very new in pip-accel and its logic may need
some time to mature. For now any misbehavior by this property
shouldn't be too much of a problem because the pathnames
reported by this property are only used for cache
invalidation (see the :attr:`last_modified` and
:attr:`checksum` properties).
"""
# Escape the requirement's name for use in a regular expression.
name_pattern = escape_name(self.name)
# Escape the requirement's version for use in a regular expression.
version_pattern = re.escape(self.version)
# Create a regular expression that matches any of the known source
# distribution archive extensions.
extension_pattern = '|'.join(re.escape(ext) for ext in ARCHIVE_EXTENSIONS if ext != '.whl')
# Compose the regular expression pattern to match filenames of source
# distribution archives in the local source index directory.
pattern = '^%s-%s(%s)$' % (name_pattern, version_pattern, extension_pattern)
# Compile the regular expression for case insensitive matching.
compiled_pattern = re.compile(pattern, re.IGNORECASE)
# Find the matching source distribution archives.
return [os.path.join(self.config.source_index, fn)
for fn in os.listdir(self.config.source_index)
if compiled_pattern.match(fn)] | The pathnames of the source distribution(s) for this requirement (a list of strings).
.. note:: This property is very new in pip-accel and its logic may need
some time to mature. For now any misbehavior by this property
shouldn't be too much of a problem because the pathnames
reported by this property are only used for cache
invalidation (see the :attr:`last_modified` and
:attr:`checksum` properties). | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/req.py#L96-L122 |
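The filename pattern that related_archives() builds can be exercised on its own. A sketch using a simplified subset of archive extensions and re.escape() as a stand-in for pip-accel's escape_name() helper (the real helper may also normalize name separators):

import re

ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.zip', '.whl')  # simplified subset

def related_archive_pattern(name, version):
    name_pattern = re.escape(name)  # stand-in for escape_name()
    version_pattern = re.escape(version)
    extension_pattern = '|'.join(re.escape(ext) for ext in ARCHIVE_EXTENSIONS
                                 if ext != '.whl')  # wheels are excluded
    return re.compile('^%s-%s(%s)$' % (name_pattern, version_pattern,
                                       extension_pattern), re.IGNORECASE)

pattern = related_archive_pattern('requests', '2.6.0')
print(bool(pattern.match('requests-2.6.0.tar.gz')))  # True
print(bool(pattern.match('requests-2.6.0.whl')))     # False (wheels excluded)
print(bool(pattern.match('requests-2.6.1.zip')))     # False (version differs)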
paylogic/pip-accel | pip_accel/req.py | Requirement.last_modified | def last_modified(self):
"""
The last modified time of the requirement's source distribution archive(s) (a number).
The value of this property is based on the :attr:`related_archives`
property. If no related archives are found the current time is
reported. In the balance between not invalidating cached binary
distributions enough and invalidating them too frequently, this
property causes the latter to happen.
"""
mtimes = list(map(os.path.getmtime, self.related_archives))
return max(mtimes) if mtimes else time.time() | python | def last_modified(self):
"""
The last modified time of the requirement's source distribution archive(s) (a number).
The value of this property is based on the :attr:`related_archives`
property. If no related archives are found the current time is
reported. In the balance between not invalidating cached binary
distributions enough and invalidating them too frequently, this
property causes the latter to happen.
"""
mtimes = list(map(os.path.getmtime, self.related_archives))
return max(mtimes) if mtimes else time.time() | The last modified time of the requirement's source distribution archive(s) (a number).
The value of this property is based on the :attr:`related_archives`
property. If no related archives are found the current time is
reported. In the balance between not invalidating cached binary
distributions enough and invalidating them too frequently, this
property causes the latter to happen. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/req.py#L125-L136 |
paylogic/pip-accel | pip_accel/req.py | Requirement.is_wheel | def is_wheel(self):
"""
:data:`True` when the requirement is a wheel, :data:`False` otherwise.
.. note:: To my surprise it seems to be non-trivial to determine
whether a given :class:`pip.req.InstallRequirement` object
produced by pip's internal Python API concerns a source
distribution or a wheel distribution.
There's a :class:`pip.req.InstallRequirement.is_wheel`
property but I'm currently looking at a wheel distribution
whose ``is_wheel`` property returns :data:`None`, apparently
because the requirement's ``url`` property is also :data:`None`.
Whether this is an obscure implementation detail of pip or
caused by the way pip-accel invokes pip, I really can't tell
(yet).
"""
probably_sdist = os.path.isfile(os.path.join(self.source_directory, 'setup.py'))
probably_wheel = len(glob.glob(os.path.join(self.source_directory, '*.dist-info', 'WHEEL'))) > 0
if probably_wheel and not probably_sdist:
return True
elif probably_sdist and not probably_wheel:
return False
elif probably_sdist and probably_wheel:
variables = dict(requirement=self.setuptools_requirement,
directory=self.source_directory)
raise UnknownDistributionFormat("""
The unpacked distribution of {requirement} in {directory} looks
like a source distribution and a wheel distribution, I'm
confused!
""", **variables)
else:
variables = dict(requirement=self.setuptools_requirement,
directory=self.source_directory)
raise UnknownDistributionFormat("""
The unpacked distribution of {requirement} in {directory}
doesn't look like a source distribution and also doesn't look
like a wheel distribution, I'm confused!
""", **variables) | python | def is_wheel(self):
"""
:data:`True` when the requirement is a wheel, :data:`False` otherwise.
.. note:: To my surprise it seems to be non-trivial to determine
whether a given :class:`pip.req.InstallRequirement` object
produced by pip's internal Python API concerns a source
distribution or a wheel distribution.
There's a :class:`pip.req.InstallRequirement.is_wheel`
property but I'm currently looking at a wheel distribution
whose ``is_wheel`` property returns :data:`None`, apparently
because the requirement's ``url`` property is also :data:`None`.
Whether this is an obscure implementation detail of pip or
caused by the way pip-accel invokes pip, I really can't tell
(yet).
"""
probably_sdist = os.path.isfile(os.path.join(self.source_directory, 'setup.py'))
probably_wheel = len(glob.glob(os.path.join(self.source_directory, '*.dist-info', 'WHEEL'))) > 0
if probably_wheel and not probably_sdist:
return True
elif probably_sdist and not probably_wheel:
return False
elif probably_sdist and probably_wheel:
variables = dict(requirement=self.setuptools_requirement,
directory=self.source_directory)
raise UnknownDistributionFormat("""
The unpacked distribution of {requirement} in {directory} looks
like a source distribution and a wheel distribution, I'm
confused!
""", **variables)
else:
variables = dict(requirement=self.setuptools_requirement,
directory=self.source_directory)
raise UnknownDistributionFormat("""
The unpacked distribution of {requirement} in {directory}
doesn't look like a source distribution and also doesn't look
like a wheel distribution, I'm confused!
""", **variables) | :data:`True` when the requirement is a wheel, :data:`False` otherwise.
.. note:: To my surprise it seems to be non-trivial to determine
whether a given :class:`pip.req.InstallRequirement` object
produced by pip's internal Python API concerns a source
distribution or a wheel distribution.
There's a :class:`pip.req.InstallRequirement.is_wheel`
property but I'm currently looking at a wheel distribution
whose ``is_wheel`` property returns :data:`None`, apparently
because the requirement's ``url`` property is also :data:`None`.
Whether this is an obscure implementation detail of pip or
caused by the way pip-accel invokes pip, I really can't tell
(yet). | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/req.py#L160-L199 |
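The heuristic in is_wheel is purely directory-shape based: a top level setup.py marks a source distribution while a *.dist-info/WHEEL file marks a wheel. A sketch of the same check that returns None instead of raising when the markers are ambiguous (classify_unpacked_distribution() is a hypothetical name):

import glob
import os

def classify_unpacked_distribution(source_directory):
    """Return True for a wheel, False for an sdist, None when ambiguous."""
    probably_sdist = os.path.isfile(os.path.join(source_directory, 'setup.py'))
    probably_wheel = bool(glob.glob(os.path.join(source_directory,
                                                 '*.dist-info', 'WHEEL')))
    if probably_wheel != probably_sdist:
        return probably_wheel
    return None  # both markers or neither marker: ambiguous

# Usage: classify_unpacked_distribution('/tmp/pip-accel-build/requests')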
paylogic/pip-accel | pip_accel/req.py | Requirement.wheel_metadata | def wheel_metadata(self):
"""Get the distribution metadata of an unpacked wheel distribution."""
if not self.is_wheel:
raise TypeError("Requirement is not a wheel distribution!")
for distribution in find_distributions(self.source_directory):
return distribution
msg = "pkg_resources didn't find a wheel distribution in %s!"
raise Exception(msg % self.source_directory) | python | def wheel_metadata(self):
"""Get the distribution metadata of an unpacked wheel distribution."""
if not self.is_wheel:
raise TypeError("Requirement is not a wheel distribution!")
for distribution in find_distributions(self.source_directory):
return distribution
msg = "pkg_resources didn't find a wheel distribution in %s!"
raise Exception(msg % self.source_directory) | Get the distribution metadata of an unpacked wheel distribution. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/req.py#L238-L245 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.validate_environment | def validate_environment(self):
"""
Make sure :data:`sys.prefix` matches ``$VIRTUAL_ENV`` (if defined).
This may seem like a strange requirement to dictate but it avoids hairy
issues like the one `documented here <https://github.com/paylogic/pip-accel/issues/5>`_.
The most sneaky thing is that ``pip`` doesn't have this problem
(de-facto) because ``virtualenv`` copies ``pip`` wherever it goes...
(``pip-accel`` on the other hand has to be installed by the user).
"""
environment = os.environ.get('VIRTUAL_ENV')
if environment:
if not same_directories(sys.prefix, environment):
raise EnvironmentMismatchError("""
You are trying to install packages in environment #1 which
is different from environment #2 where pip-accel is
installed! Please install pip-accel under environment #1 to
install packages there.
Environment #1: {environment} (defined by $VIRTUAL_ENV)
Environment #2: {prefix} (Python's installation prefix)
""", environment=environment, prefix=sys.prefix) | python | def validate_environment(self):
"""
Make sure :data:`sys.prefix` matches ``$VIRTUAL_ENV`` (if defined).
This may seem like a strange requirement to dictate but it avoids hairy
issues like the one `documented here <https://github.com/paylogic/pip-accel/issues/5>`_.
The most sneaky thing is that ``pip`` doesn't have this problem
(de-facto) because ``virtualenv`` copies ``pip`` wherever it goes...
(``pip-accel`` on the other hand has to be installed by the user).
"""
environment = os.environ.get('VIRTUAL_ENV')
if environment:
if not same_directories(sys.prefix, environment):
raise EnvironmentMismatchError("""
You are trying to install packages in environment #1 which
is different from environment #2 where pip-accel is
installed! Please install pip-accel under environment #1 to
install packages there.
Environment #1: {environment} (defined by $VIRTUAL_ENV)
Environment #2: {prefix} (Python's installation prefix)
""", environment=environment, prefix=sys.prefix) | Make sure :data:`sys.prefix` matches ``$VIRTUAL_ENV`` (if defined).
This may seem like a strange requirement to dictate but it avoids hairy
issues like the one `documented here <https://github.com/paylogic/pip-accel/issues/5>`_.
The most sneaky thing is that ``pip`` doesn't have this problem
(de-facto) because ``virtualenv`` copies ``pip`` wherever it goes...
(``pip-accel`` on the other hand has to be installed by the user). | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L121-L144 |
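same_directories() is pip-accel's own helper; a sketch of the equivalent check using os.path.realpath() so that symlinked environment paths still compare equal (virtualenv_matches_interpreter() is a hypothetical name):

import os
import sys

def virtualenv_matches_interpreter():
    environment = os.environ.get('VIRTUAL_ENV')
    if not environment:
        return True  # not running inside a virtual environment
    return os.path.realpath(environment) == os.path.realpath(sys.prefix)

if not virtualenv_matches_interpreter():
    raise SystemExit("pip-accel is installed in a different environment than "
                     "the one you are installing packages into!")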
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.initialize_directories | def initialize_directories(self):
"""Automatically create local directories required by pip-accel."""
makedirs(self.config.source_index)
makedirs(self.config.eggs_cache) | python | def initialize_directories(self):
"""Automatically create local directories required by pip-accel."""
makedirs(self.config.source_index)
makedirs(self.config.eggs_cache) | Automatically create local directories required by pip-accel. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L146-L149 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.clean_source_index | def clean_source_index(self):
"""
Clean up broken symbolic links in the local source distribution index.
The purpose of this method requires some context to understand. Let me
preface this by stating that I realize I'm probably overcomplicating
things, but I like to preserve forward / backward compatibility when
possible and I don't feel like dropping everyone's locally cached
source distribution archives without a good reason to do so. With that
out of the way:
- Versions of pip-accel based on pip 1.4.x maintained a local source
distribution index based on a directory containing symbolic links
pointing directly into pip's download cache. When files were removed
from pip's download cache, broken symbolic links remained in
pip-accel's local source distribution index directory. This resulted
in very confusing error messages. To avoid this
:func:`clean_source_index()` cleaned up broken symbolic links
whenever pip-accel was about to invoke pip.
- More recent versions of pip (6.x) no longer support the same style of
download cache that contains source distribution archives that can be
re-used directly by pip-accel. To cope with the changes in pip 6.x
new versions of pip-accel tell pip to download source distribution
archives directly into the local source distribution index directory
maintained by pip-accel.
- It is very reasonable for users of pip-accel to have multiple
versions of pip-accel installed on their system (imagine a dozen
Python virtual environments that won't all be updated at the same
time; this is the situation I always find myself in :-). These
versions of pip-accel will be sharing the same local source
distribution index directory.
- All of this leads up to the local source distribution index directory
containing a mixture of symbolic links and regular files with no
obvious way to atomically and gracefully upgrade the local source
distribution index directory while avoiding fights between old and
new versions of pip-accel :-).
- I could of course switch to storing the new local source distribution
index in a differently named directory (avoiding potential conflicts
between multiple versions of pip-accel) but then I would have to
introduce a new configuration option, otherwise everyone who has
configured pip-accel to store its source index in a non-default
location could still be bitten by compatibility issues.
For now I've decided to keep using the same directory for the local
source distribution index and to keep cleaning up broken symbolic
links. This enables cooperation between old and new versions of
pip-accel and avoids trashing users' local source distribution indexes.
The main disadvantage is that pip-accel is still required to clean up
broken symbolic links...
"""
cleanup_timer = Timer()
cleanup_counter = 0
for entry in os.listdir(self.config.source_index):
pathname = os.path.join(self.config.source_index, entry)
if os.path.islink(pathname) and not os.path.exists(pathname):
logger.warning("Cleaning up broken symbolic link: %s", pathname)
os.unlink(pathname)
cleanup_counter += 1
logger.debug("Cleaned up %i broken symbolic links from source index in %s.", cleanup_counter, cleanup_timer) | python | def clean_source_index(self):
"""
Clean up broken symbolic links in the local source distribution index.
The purpose of this method requires some context to understand. Let me
preface this by stating that I realize I'm probably overcomplicating
things, but I like to preserve forward / backward compatibility when
possible and I don't feel like dropping everyone's locally cached
source distribution archives without a good reason to do so. With that
out of the way:
- Versions of pip-accel based on pip 1.4.x maintained a local source
distribution index based on a directory containing symbolic links
pointing directly into pip's download cache. When files were removed
from pip's download cache, broken symbolic links remained in
pip-accel's local source distribution index directory. This resulted
in very confusing error messages. To avoid this
:func:`clean_source_index()` cleaned up broken symbolic links
whenever pip-accel was about to invoke pip.
- More recent versions of pip (6.x) no longer support the same style of
download cache that contains source distribution archives that can be
re-used directly by pip-accel. To cope with the changes in pip 6.x
new versions of pip-accel tell pip to download source distribution
archives directly into the local source distribution index directory
maintained by pip-accel.
- It is very reasonable for users of pip-accel to have multiple
versions of pip-accel installed on their system (imagine a dozen
Python virtual environments that won't all be updated at the same
time; this is the situation I always find myself in :-). These
versions of pip-accel will be sharing the same local source
distribution index directory.
- All of this leads up to the local source distribution index directory
containing a mixture of symbolic links and regular files with no
obvious way to atomically and gracefully upgrade the local source
distribution index directory while avoiding fights between old and
new versions of pip-accel :-).
- I could of course switch to storing the new local source distribution
index in a differently named directory (avoiding potential conflicts
between multiple versions of pip-accel) but then I would have to
introduce a new configuration option, otherwise everyone who has
configured pip-accel to store its source index in a non-default
location could still be bitten by compatibility issues.
For now I've decided to keep using the same directory for the local
source distribution index and to keep cleaning up broken symbolic
links. This enables cooperation between old and new versions of
pip-accel and avoids trashing users' local source distribution indexes.
The main disadvantage is that pip-accel is still required to clean up
broken symbolic links...
"""
cleanup_timer = Timer()
cleanup_counter = 0
for entry in os.listdir(self.config.source_index):
pathname = os.path.join(self.config.source_index, entry)
if os.path.islink(pathname) and not os.path.exists(pathname):
logger.warning("Cleaning up broken symbolic link: %s", pathname)
os.unlink(pathname)
cleanup_counter += 1
logger.debug("Cleaned up %i broken symbolic links from source index in %s.", cleanup_counter, cleanup_timer) | Cleanup broken symbolic links in the local source distribution index.
The purpose of this method requires some context to understand. Let me
preface this by stating that I realize I'm probably overcomplicating
things, but I like to preserve forward / backward compatibility when
possible and I don't feel like dropping everyone's locally cached
source distribution archives without a good reason to do so. With that
out of the way:
- Versions of pip-accel based on pip 1.4.x maintained a local source
distribution index based on a directory containing symbolic links
pointing directly into pip's download cache. When files were removed
from pip's download cache, broken symbolic links remained in
pip-accel's local source distribution index directory. This resulted
in very confusing error messages. To avoid this
:func:`clean_source_index()` cleaned up broken symbolic links
whenever pip-accel was about to invoke pip.
- More recent versions of pip (6.x) no longer support the same style of
download cache that contains source distribution archives that can be
re-used directly by pip-accel. To cope with the changes in pip 6.x
new versions of pip-accel tell pip to download source distribution
archives directly into the local source distribution index directory
maintained by pip-accel.
- It is very reasonable for users of pip-accel to have multiple
versions of pip-accel installed on their system (imagine a dozen
Python virtual environments that won't all be updated at the same
time; this is the situation I always find myself in :-). These
versions of pip-accel will be sharing the same local source
distribution index directory.
- All of this leads up to the local source distribution index directory
containing a mixture of symbolic links and regular files with no
obvious way to atomically and gracefully upgrade the local source
distribution index directory while avoiding fights between old and
new versions of pip-accel :-).
- I could of course switch to storing the new local source distribution
index in a differently named directory (avoiding potential conflicts
between multiple versions of pip-accel) but then I would have to
introduce a new configuration option, otherwise everyone who has
configured pip-accel to store its source index in a non-default
location could still be bitten by compatibility issues.
For now I've decided to keep using the same directory for the local
source distribution index and to keep cleaning up broken symbolic
links. This enables cooperation between old and new versions of
pip-accel and avoids trashing users' local source distribution indexes.
The main disadvantage is that pip-accel is still required to clean up
broken symbolic links... | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L151-L213 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.install_from_arguments | def install_from_arguments(self, arguments, **kw):
"""
Download, unpack, build and install the specified requirements.
This function is a simple wrapper for :func:`get_requirements()`,
:func:`install_requirements()` and :func:`cleanup_temporary_directories()`
that implements the default behavior of the pip accelerator. If you're
extending or embedding pip-accel you may want to call the underlying
methods instead.
If the requirement set includes wheels and ``setuptools >= 0.8`` is not
yet installed, it will be added to the requirement set and installed
together with the other requirement(s) in order to enable the usage of
distributions installed from wheels (their metadata is different).
:param arguments: The command line arguments to ``pip install ..`` (a
list of strings).
:param kw: Any keyword arguments are passed on to
:func:`install_requirements()`.
:returns: The result of :func:`install_requirements()`.
"""
try:
requirements = self.get_requirements(arguments, use_wheels=self.arguments_allow_wheels(arguments))
have_wheels = any(req.is_wheel for req in requirements)
if have_wheels and not self.setuptools_supports_wheels():
logger.info("Preparing to upgrade to setuptools >= 0.8 to enable wheel support ..")
requirements.extend(self.get_requirements(['setuptools >= 0.8']))
if requirements:
if '--user' in arguments:
from site import USER_BASE
kw.setdefault('prefix', USER_BASE)
return self.install_requirements(requirements, **kw)
else:
logger.info("Nothing to do! (requirements already installed)")
return 0
finally:
self.cleanup_temporary_directories() | python | def install_from_arguments(self, arguments, **kw):
"""
Download, unpack, build and install the specified requirements.
This function is a simple wrapper for :func:`get_requirements()`,
:func:`install_requirements()` and :func:`cleanup_temporary_directories()`
that implements the default behavior of the pip accelerator. If you're
extending or embedding pip-accel you may want to call the underlying
methods instead.
If the requirement set includes wheels and ``setuptools >= 0.8`` is not
yet installed, it will be added to the requirement set and installed
together with the other requirement(s) in order to enable the usage of
distributions installed from wheels (their metadata is different).
:param arguments: The command line arguments to ``pip install ..`` (a
list of strings).
:param kw: Any keyword arguments are passed on to
:func:`install_requirements()`.
:returns: The result of :func:`install_requirements()`.
"""
try:
requirements = self.get_requirements(arguments, use_wheels=self.arguments_allow_wheels(arguments))
have_wheels = any(req.is_wheel for req in requirements)
if have_wheels and not self.setuptools_supports_wheels():
logger.info("Preparing to upgrade to setuptools >= 0.8 to enable wheel support ..")
requirements.extend(self.get_requirements(['setuptools >= 0.8']))
if requirements:
if '--user' in arguments:
from site import USER_BASE
kw.setdefault('prefix', USER_BASE)
return self.install_requirements(requirements, **kw)
else:
logger.info("Nothing to do! (requirements already installed)")
return 0
finally:
self.cleanup_temporary_directories() | Download, unpack, build and install the specified requirements.
This function is a simple wrapper for :func:`get_requirements()`,
:func:`install_requirements()` and :func:`cleanup_temporary_directories()`
that implements the default behavior of the pip accelerator. If you're
extending or embedding pip-accel you may want to call the underlying
methods instead.
If the requirement set includes wheels and ``setuptools >= 0.8`` is not
yet installed, it will be added to the requirement set and installed
together with the other requirement(s) in order to enable the usage of
distributions installed from wheels (their metadata is different).
:param arguments: The command line arguments to ``pip install ..`` (a
list of strings).
:param kw: Any keyword arguments are passed on to
:func:`install_requirements()`.
:returns: The result of :func:`install_requirements()`. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L215-L251 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.get_requirements | def get_requirements(self, arguments, max_retries=None, use_wheels=False):
"""
Use pip to download and unpack the requested source distribution archives.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param max_retries: The maximum number of times that pip will be asked
to download distribution archives (this helps to
deal with intermittent failures). If this is
:data:`None` then :attr:`~.Config.max_retries` is
used.
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
.. warning:: Requirements which are already installed are not included
in the result. If this breaks your use case consider using
pip's ``--ignore-installed`` option.
"""
arguments = self.decorate_arguments(arguments)
# Demote hash sum mismatch log messages from CRITICAL to DEBUG (hiding
# implementation details from users unless they want to see them).
with DownloadLogFilter():
with SetupRequiresPatch(self.config, self.eggs_links):
# Use a new build directory for each run of get_requirements().
self.create_build_directory()
# Check whether -U or --upgrade was given.
if any(match_option(a, '-U', '--upgrade') for a in arguments):
logger.info("Checking index(es) for new version (-U or --upgrade was given) ..")
else:
# If -U or --upgrade wasn't given and all requirements can be
# satisfied using the archives in pip-accel's local source
# index we don't need pip to connect to PyPI looking for new
# versions (that will just slow us down).
try:
return self.unpack_source_dists(arguments, use_wheels=use_wheels)
except DistributionNotFound:
logger.info("We don't have all distribution archives yet!")
# Get the maximum number of retries from the configuration if the
# caller didn't specify a preference.
if max_retries is None:
max_retries = self.config.max_retries
# If not all requirements are available locally we use pip to
# download the missing source distribution archives from PyPI (we
# retry a couple of times in case pip reports recoverable
# errors).
for i in range(max_retries):
try:
return self.download_source_dists(arguments, use_wheels=use_wheels)
except Exception as e:
if i + 1 < max_retries:
# On all but the last iteration we swallow exceptions
# during downloading.
logger.warning("pip raised exception while downloading distributions: %s", e)
else:
# On the last iteration we don't swallow exceptions
# during downloading because the error reported by pip
# is the most sensible error for us to report.
raise
logger.info("Retrying after pip failed (%i/%i) ..", i + 1, max_retries) | python | def get_requirements(self, arguments, max_retries=None, use_wheels=False):
"""
Use pip to download and unpack the requested source distribution archives.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param max_retries: The maximum number of times that pip will be asked
to download distribution archives (this helps to
deal with intermittent failures). If this is
:data:`None` then :attr:`~.Config.max_retries` is
used.
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
.. warning:: Requirements which are already installed are not included
in the result. If this breaks your use case consider using
pip's ``--ignore-installed`` option.
"""
arguments = self.decorate_arguments(arguments)
# Demote hash sum mismatch log messages from CRITICAL to DEBUG (hiding
# implementation details from users unless they want to see them).
with DownloadLogFilter():
with SetupRequiresPatch(self.config, self.eggs_links):
# Use a new build directory for each run of get_requirements().
self.create_build_directory()
# Check whether -U or --upgrade was given.
if any(match_option(a, '-U', '--upgrade') for a in arguments):
logger.info("Checking index(es) for new version (-U or --upgrade was given) ..")
else:
# If -U or --upgrade wasn't given and all requirements can be
# satisfied using the archives in pip-accel's local source
# index we don't need pip to connect to PyPI looking for new
# versions (that will just slow us down).
try:
return self.unpack_source_dists(arguments, use_wheels=use_wheels)
except DistributionNotFound:
logger.info("We don't have all distribution archives yet!")
# Get the maximum number of retries from the configuration if the
# caller didn't specify a preference.
if max_retries is None:
max_retries = self.config.max_retries
# If not all requirements are available locally we use pip to
# download the missing source distribution archives from PyPI (we
# retry a couple of times in case pip reports recoverable
# errors).
for i in range(max_retries):
try:
return self.download_source_dists(arguments, use_wheels=use_wheels)
except Exception as e:
if i + 1 < max_retries:
# On all but the last iteration we swallow exceptions
# during downloading.
logger.warning("pip raised exception while downloading distributions: %s", e)
else:
# On the last iteration we don't swallow exceptions
# during downloading because the error reported by pip
# is the most sensible error for us to report.
raise
logger.info("Retrying after pip failed (%i/%i) ..", i + 1, max_retries) | Use pip to download and unpack the requested source distribution archives.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param max_retries: The maximum number of times that pip will be asked
to download distribution archives (this helps to
deal with intermittent failures). If this is
:data:`None` then :attr:`~.Config.max_retries` is
used.
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
.. warning:: Requirements which are already installed are not included
in the result. If this breaks your use case consider using
pip's ``--ignore-installed`` option. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L262-L321 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.decorate_arguments | def decorate_arguments(self, arguments):
"""
Change pathnames of local files into ``file://`` URLs with ``#md5=...`` fragments.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:returns: A copy of the command line arguments with pathnames of local
files rewritten to ``file://`` URLs.
When pip-accel calls pip to download missing distribution archives and
the user specified the pathname of a local distribution archive on the
command line, pip will (by default) *not* copy the archive into the
download directory if an archive for the same package name and
version is already present.
This can lead to the confusing situation where the user specifies a
local distribution archive to install, a different (older) archive for
the same package and version is present in the download directory and
`pip-accel` installs the older archive instead of the newer archive.
To avoid this confusing behavior, the :func:`decorate_arguments()`
method rewrites the command line arguments given to ``pip install`` so
that pathnames of local archives are changed into ``file://`` URLs that
include a fragment with the hash of the file's contents. Here's an
example:
- Local pathname: ``/tmp/pep8-1.6.3a0.tar.gz``
- File URL: ``file:///tmp/pep8-1.6.3a0.tar.gz#md5=19cbf0b633498ead63fb3c66e5f1caf6``
When pip fills the download directory and encounters a previously
cached distribution archive it will check the hash, realize the
contents have changed and replace the archive in the download
directory.
"""
arguments = list(arguments)
for i, value in enumerate(arguments):
is_constraint_file = (i >= 1 and match_option(arguments[i - 1], '-c', '--constraint'))
is_requirement_file = (i >= 1 and match_option(arguments[i - 1], '-r', '--requirement'))
if not is_constraint_file and not is_requirement_file and os.path.isfile(value):
arguments[i] = '%s#md5=%s' % (create_file_url(value), hash_files('md5', value))
return arguments | python | def decorate_arguments(self, arguments):
"""
Change pathnames of local files into ``file://`` URLs with ``#md5=...`` fragments.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:returns: A copy of the command line arguments with pathnames of local
files rewritten to ``file://`` URLs.
When pip-accel calls pip to download missing distribution archives and
the user specified the pathname of a local distribution archive on the
command line, pip will (by default) *not* copy the archive into the
download directory if an archive for the same package name and
version is already present.
This can lead to the confusing situation where the user specifies a
local distribution archive to install, a different (older) archive for
the same package and version is present in the download directory and
`pip-accel` installs the older archive instead of the newer archive.
To avoid this confusing behavior, the :func:`decorate_arguments()`
method rewrites the command line arguments given to ``pip install`` so
that pathnames of local archives are changed into ``file://`` URLs that
include a fragment with the hash of the file's contents. Here's an
example:
- Local pathname: ``/tmp/pep8-1.6.3a0.tar.gz``
- File URL: ``file:///tmp/pep8-1.6.3a0.tar.gz#md5=19cbf0b633498ead63fb3c66e5f1caf6``
When pip fills the download directory and encounters a previously
cached distribution archive it will check the hash, realize the
contents have changed and replace the archive in the download
directory.
"""
arguments = list(arguments)
for i, value in enumerate(arguments):
is_constraint_file = (i >= 1 and match_option(arguments[i - 1], '-c', '--constraint'))
is_requirement_file = (i >= 1 and match_option(arguments[i - 1], '-r', '--requirement'))
if not is_constraint_file and not is_requirement_file and os.path.isfile(value):
arguments[i] = '%s#md5=%s' % (create_file_url(value), hash_files('md5', value))
return arguments | Change pathnames of local files into ``file://`` URLs with ``#md5=...`` fragments.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:returns: A copy of the command line arguments with pathnames of local
files rewritten to ``file://`` URLs.
When pip-accel calls pip to download missing distribution archives and
the user specified the pathname of a local distribution archive on the
command line, pip will (by default) *not* copy the archive into the
download directory if an archive for the same package name and
version is already present.
This can lead to the confusing situation where the user specifies a
local distribution archive to install, a different (older) archive for
the same package and version is present in the download directory and
`pip-accel` installs the older archive instead of the newer archive.
To avoid this confusing behavior, the :func:`decorate_arguments()`
method rewrites the command line arguments given to ``pip install`` so
that pathnames of local archives are changed into ``file://`` URLs that
include a fragment with the hash of the file's contents. Here's an
example:
- Local pathname: ``/tmp/pep8-1.6.3a0.tar.gz``
- File URL: ``file:///tmp/pep8-1.6.3a0.tar.gz#md5=19cbf0b633498ead63fb3c66e5f1caf6``
When pip fills the download directory and encounters a previously
cached distribution archive it will check the hash, realize the
contents have changed and replace the archive in the download
directory. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L323-L363 |
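The observable effect of the rewriting above can be re-created with the standard library alone; `create_file_url()` and `hash_files()` are pip-accel helpers not shown here, so this sketch only approximates them for a hypothetical archive:
```
import hashlib

def as_decorated_argument(pathname):
    # Read the archive and compute the md5 fragment, as hash_files()
    # presumably does for the 'md5' method.
    with open(pathname, 'rb') as handle:
        digest = hashlib.md5(handle.read()).hexdigest()
    # The real create_file_url() may also quote special characters.
    return 'file://%s#md5=%s' % (pathname, digest)

# as_decorated_argument('/tmp/pep8-1.6.3a0.tar.gz')
# -> 'file:///tmp/pep8-1.6.3a0.tar.gz#md5=19cbf0b633498ead63fb3c66e5f1caf6'
```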
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.unpack_source_dists | def unpack_source_dists(self, arguments, use_wheels=False):
"""
Find and unpack local source distributions and discover their metadata.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:returns: A list of :class:`pip_accel.req.Requirement` objects.
:raises: Any exceptions raised by pip, for example
:exc:`pip.exceptions.DistributionNotFound` when not all
requirements can be satisfied.
This function checks whether there are local source distributions
available for all requirements, unpacks the source distribution
archives and finds the names and versions of the requirements. By using
the ``pip install --download`` command we avoid reimplementing the
following pip features:
- Parsing of ``requirements.txt`` (including recursive parsing).
- Resolution of possibly conflicting pinned requirements.
- Unpacking source distributions in multiple formats.
- Finding the name & version of a given source distribution.
"""
unpack_timer = Timer()
logger.info("Unpacking distribution(s) ..")
with PatchedAttribute(pip_install_module, 'PackageFinder', CustomPackageFinder):
requirements = self.get_pip_requirement_set(arguments, use_remote_index=False, use_wheels=use_wheels)
logger.info("Finished unpacking %s in %s.", pluralize(len(requirements), "distribution"), unpack_timer)
return requirements | python | def unpack_source_dists(self, arguments, use_wheels=False):
"""
Find and unpack local source distributions and discover their metadata.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:returns: A list of :class:`pip_accel.req.Requirement` objects.
:raises: Any exceptions raised by pip, for example
:exc:`pip.exceptions.DistributionNotFound` when not all
requirements can be satisfied.
This function checks whether there are local source distributions
available for all requirements, unpacks the source distribution
archives and finds the names and versions of the requirements. By using
the ``pip install --download`` command we avoid reimplementing the
following pip features:
- Parsing of ``requirements.txt`` (including recursive parsing).
- Resolution of possibly conflicting pinned requirements.
- Unpacking source distributions in multiple formats.
- Finding the name & version of a given source distribution.
"""
unpack_timer = Timer()
logger.info("Unpacking distribution(s) ..")
with PatchedAttribute(pip_install_module, 'PackageFinder', CustomPackageFinder):
requirements = self.get_pip_requirement_set(arguments, use_remote_index=False, use_wheels=use_wheels)
logger.info("Finished unpacking %s in %s.", pluralize(len(requirements), "distribution"), unpack_timer)
return requirements | Find and unpack local source distributions and discover their metadata.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:returns: A list of :class:`pip_accel.req.Requirement` objects.
:raises: Any exceptions raised by pip, for example
:exc:`pip.exceptions.DistributionNotFound` when not all
requirements can be satisfied.
This function checks whether there are local source distributions
available for all requirements, unpacks the source distribution
archives and finds the names and versions of the requirements. By using
the ``pip install --download`` command we avoid reimplementing the
following pip features:
- Parsing of ``requirements.txt`` (including recursive parsing).
- Resolution of possibly conflicting pinned requirements.
- Unpacking source distributions in multiple formats.
- Finding the name & version of a given source distribution. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L365-L395 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.download_source_dists | def download_source_dists(self, arguments, use_wheels=False):
"""
Download missing source distributions.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:raises: Any exceptions raised by pip.
"""
download_timer = Timer()
logger.info("Downloading missing distribution(s) ..")
requirements = self.get_pip_requirement_set(arguments, use_remote_index=True, use_wheels=use_wheels)
logger.info("Finished downloading distribution(s) in %s.", download_timer)
return requirements | python | def download_source_dists(self, arguments, use_wheels=False):
"""
Download missing source distributions.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:raises: Any exceptions raised by pip.
"""
download_timer = Timer()
logger.info("Downloading missing distribution(s) ..")
requirements = self.get_pip_requirement_set(arguments, use_remote_index=True, use_wheels=use_wheels)
logger.info("Finished downloading distribution(s) in %s.", download_timer)
return requirements | Download missing source distributions.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:raises: Any exceptions raised by pip. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L397-L412 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.get_pip_requirement_set | def get_pip_requirement_set(self, arguments, use_remote_index, use_wheels=False):
"""
Get the unpacked requirement(s) specified by the caller by running pip.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_remote_index: A boolean indicating whether pip is allowed to
connect to the main package index
(http://pypi.python.org by default).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:returns: A :class:`pip.req.RequirementSet` object created by pip.
:raises: Any exceptions raised by pip.
"""
# Compose the pip command line arguments. This is where a lot of the
# core logic of pip-accel is hidden and it uses some esoteric features
# of pip so this method is heavily commented.
command_line = []
# Use `--download' to instruct pip to download requirement(s) into
# pip-accel's local source distribution index directory. This has the
# following documented side effects (see `pip install --help'):
# 1. It disables the installation of requirements (without using the
# `--no-install' option which is deprecated and slated for removal
# in pip 7.x).
# 2. It ignores requirements that are already installed (because
# pip-accel doesn't actually need to re-install requirements that
# are already installed we will have work around this later, but
# that seems fairly simple to do).
command_line.append('--download=%s' % self.config.source_index)
# Use `--find-links' to point pip at pip-accel's local source
# distribution index directory. This ensures that source distribution
# archives are never downloaded more than once (regardless of the HTTP
# cache that was introduced in pip 6.x).
command_line.append('--find-links=%s' % create_file_url(self.config.source_index))
# Use `--no-binary=:all:' to ignore wheel distributions by default in
# order to preserve backwards compatibility with callers that expect a
# requirement set consisting only of source distributions that can be
# converted to `dumb binary distributions'.
if not use_wheels and self.arguments_allow_wheels(arguments):
command_line.append('--no-binary=:all:')
# Use `--no-index' to force pip to only consider source distribution
# archives contained in pip-accel's local source distribution index
# directory. This enables pip-accel to ask pip "Can the local source
# distribution index satisfy all requirements in the given requirement
# set?" which enables pip-accel to keep pip off the internet unless
# absolutely necessary :-).
if not use_remote_index:
command_line.append('--no-index')
# Use `--no-clean' to instruct pip to unpack the source distribution
# archives and *not* clean up the unpacked source distributions
# afterwards. This enables pip-accel to replace pip's installation
# logic with cached binary distribution archives.
command_line.append('--no-clean')
# Use `--build-directory' to instruct pip to unpack the source
# distribution archives to a temporary directory managed by pip-accel.
# We will clean up the build directory when we're done using the
# unpacked source distributions.
command_line.append('--build-directory=%s' % self.build_directory)
# Append the user's `pip install ...' arguments to the command line
# that we just assembled.
command_line.extend(arguments)
logger.info("Executing command: pip install %s", ' '.join(command_line))
# Clear the build directory to prevent PreviousBuildDirError exceptions.
self.clear_build_directory()
# During the pip 6.x upgrade pip-accel switched to using `pip install
# --download' which can produce an interactive prompt as described in
# issue 51 [1]. The documented way [2] to get rid of this interactive
# prompt is pip's --exists-action option, but due to what is most
# likely a bug in pip this doesn't actually work. The environment
# variable $PIP_EXISTS_ACTION does work however, so if the user didn't
# set it we will set a reasonable default for them.
# [1] https://github.com/paylogic/pip-accel/issues/51
# [2] https://pip.pypa.io/en/latest/reference/pip.html#exists-action-option
os.environ.setdefault('PIP_EXISTS_ACTION', 'w')
# Initialize and run the `pip install' command.
command = InstallCommand()
opts, args = command.parse_args(command_line)
if not opts.ignore_installed:
# If the user didn't supply the -I, --ignore-installed option we
# will forcefully disable the option. Refer to the documentation of
# the AttributeOverrides class for further details.
opts = AttributeOverrides(opts, ignore_installed=False)
requirement_set = command.run(opts, args)
# Make sure the output of pip and pip-accel are not intermingled.
sys.stdout.flush()
if requirement_set is None:
raise NothingToDoError("""
pip didn't generate a requirement set; most likely you
specified an empty requirements file.
""")
else:
return self.transform_pip_requirement_set(requirement_set) | python | def get_pip_requirement_set(self, arguments, use_remote_index, use_wheels=False):
"""
Get the unpacked requirement(s) specified by the caller by running pip.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_remote_index: A boolean indicating whether pip is allowed to
connect to the main package index
(http://pypi.python.org by default).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:returns: A :class:`pip.req.RequirementSet` object created by pip.
:raises: Any exceptions raised by pip.
"""
# Compose the pip command line arguments. This is where a lot of the
# core logic of pip-accel is hidden and it uses some esoteric features
# of pip so this method is heavily commented.
command_line = []
# Use `--download' to instruct pip to download requirement(s) into
# pip-accel's local source distribution index directory. This has the
# following documented side effects (see `pip install --help'):
# 1. It disables the installation of requirements (without using the
# `--no-install' option which is deprecated and slated for removal
# in pip 7.x).
# 2. It ignores requirements that are already installed (because
# pip-accel doesn't actually need to re-install requirements that
# are already installed; we will have to work around this later, but
# that seems fairly simple to do).
command_line.append('--download=%s' % self.config.source_index)
# Use `--find-links' to point pip at pip-accel's local source
# distribution index directory. This ensures that source distribution
# archives are never downloaded more than once (regardless of the HTTP
# cache that was introduced in pip 6.x).
command_line.append('--find-links=%s' % create_file_url(self.config.source_index))
# Use `--no-binary=:all:' to ignore wheel distributions by default in
# order to preserve backwards compatibility with callers that expect a
# requirement set consisting only of source distributions that can be
# converted to `dumb binary distributions'.
if not use_wheels and self.arguments_allow_wheels(arguments):
command_line.append('--no-binary=:all:')
# Use `--no-index' to force pip to only consider source distribution
# archives contained in pip-accel's local source distribution index
# directory. This enables pip-accel to ask pip "Can the local source
# distribution index satisfy all requirements in the given requirement
# set?" which enables pip-accel to keep pip off the internet unless
# absolutely necessary :-).
if not use_remote_index:
command_line.append('--no-index')
# Use `--no-clean' to instruct pip to unpack the source distribution
# archives and *not* clean up the unpacked source distributions
# afterwards. This enables pip-accel to replace pip's installation
# logic with cached binary distribution archives.
command_line.append('--no-clean')
# Use `--build-directory' to instruct pip to unpack the source
# distribution archives to a temporary directory managed by pip-accel.
# We will clean up the build directory when we're done using the
# unpacked source distributions.
command_line.append('--build-directory=%s' % self.build_directory)
# Append the user's `pip install ...' arguments to the command line
# that we just assembled.
command_line.extend(arguments)
logger.info("Executing command: pip install %s", ' '.join(command_line))
# Clear the build directory to prevent PreviousBuildDirError exceptions.
self.clear_build_directory()
# During the pip 6.x upgrade pip-accel switched to using `pip install
# --download' which can produce an interactive prompt as described in
# issue 51 [1]. The documented way [2] to get rid of this interactive
# prompt is pip's --exists-action option, but due to what is most
# likely a bug in pip this doesn't actually work. The environment
# variable $PIP_EXISTS_ACTION does work however, so if the user didn't
# set it we will set a reasonable default for them.
# [1] https://github.com/paylogic/pip-accel/issues/51
# [2] https://pip.pypa.io/en/latest/reference/pip.html#exists-action-option
os.environ.setdefault('PIP_EXISTS_ACTION', 'w')
# Initialize and run the `pip install' command.
command = InstallCommand()
opts, args = command.parse_args(command_line)
if not opts.ignore_installed:
# If the user didn't supply the -I, --ignore-installed option we
# will forcefully disable the option. Refer to the documentation of
# the AttributeOverrides class for further details.
opts = AttributeOverrides(opts, ignore_installed=False)
requirement_set = command.run(opts, args)
# Make sure the output of pip and pip-accel are not intermingled.
sys.stdout.flush()
if requirement_set is None:
raise NothingToDoError("""
pip didn't generate a requirement set; most likely you
specified an empty requirements file.
""")
else:
return self.transform_pip_requirement_set(requirement_set) | Get the unpacked requirement(s) specified by the caller by running pip.
:param arguments: The command line arguments to ``pip install ...`` (a
list of strings).
:param use_remote_index: A boolean indicating whether pip is allowed to
connect to the main package index
(http://pypi.python.org by default).
:param use_wheels: Whether pip and pip-accel are allowed to use wheels_
(:data:`False` by default for backwards compatibility
with callers that use pip-accel as a Python API).
:returns: A :class:`pip.req.RequirementSet` object created by pip.
:raises: Any exceptions raised by pip. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L414-L506 |
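For a local-only run (`use_remote_index=False`, `use_wheels=False`) the method above composes a pip command line shaped like the following; the directory names are placeholders, not the real defaults:
```
command_line = [
    '--download=/home/user/.pip-accel/sources',            # fill the local source index
    '--find-links=file:///home/user/.pip-accel/sources',   # reuse cached archives
    '--no-binary=:all:',                                   # source distributions only
    '--no-index',                                          # keep pip off the internet
    '--no-clean',                                          # keep unpacked sources around
    '--build-directory=/tmp/pip-accel-build',              # managed by pip-accel
    '-r', 'requirements.txt',                              # the caller's own arguments
]
```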
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.transform_pip_requirement_set | def transform_pip_requirement_set(self, requirement_set):
"""
Transform pip's requirement set into one that `pip-accel` can work with.
:param requirement_set: The :class:`pip.req.RequirementSet` object
reported by pip.
:returns: A list of :class:`pip_accel.req.Requirement` objects.
This function converts the :class:`pip.req.RequirementSet` object
reported by pip into a list of :class:`pip_accel.req.Requirement`
objects.
"""
filtered_requirements = []
for requirement in requirement_set.requirements.values():
# The `satisfied_by' property is set by pip when a requirement is
# already satisfied (i.e. a version of the package that satisfies
# the requirement is already installed) and -I, --ignore-installed
# is not used. We filter out these requirements because pip never
# unpacks distributions for these requirements, so pip-accel can't
# do anything useful with such requirements.
if requirement.satisfied_by:
continue
# The `constraint' property marks requirement objects that
# constrain the acceptable version(s) of another requirement but
# don't define a requirement themselves, so we filter them out.
if requirement.constraint:
continue
# All other requirements are reported to callers.
filtered_requirements.append(requirement)
self.reported_requirements.append(requirement)
return sorted([Requirement(self.config, r) for r in filtered_requirements],
key=lambda r: r.name.lower()) | python | def transform_pip_requirement_set(self, requirement_set):
"""
Transform pip's requirement set into one that `pip-accel` can work with.
:param requirement_set: The :class:`pip.req.RequirementSet` object
reported by pip.
:returns: A list of :class:`pip_accel.req.Requirement` objects.
This function converts the :class:`pip.req.RequirementSet` object
reported by pip into a list of :class:`pip_accel.req.Requirement`
objects.
"""
filtered_requirements = []
for requirement in requirement_set.requirements.values():
# The `satisfied_by' property is set by pip when a requirement is
# already satisfied (i.e. a version of the package that satisfies
# the requirement is already installed) and -I, --ignore-installed
# is not used. We filter out these requirements because pip never
# unpacks distributions for these requirements, so pip-accel can't
# do anything useful with such requirements.
if requirement.satisfied_by:
continue
# The `constraint' property marks requirement objects that
# constrain the acceptable version(s) of another requirement but
# don't define a requirement themselves, so we filter them out.
if requirement.constraint:
continue
# All other requirements are reported to callers.
filtered_requirements.append(requirement)
self.reported_requirements.append(requirement)
return sorted([Requirement(self.config, r) for r in filtered_requirements],
key=lambda r: r.name.lower()) | Transform pip's requirement set into one that `pip-accel` can work with.
:param requirement_set: The :class:`pip.req.RequirementSet` object
reported by pip.
:returns: A list of :class:`pip_accel.req.Requirement` objects.
This function converts the :class:`pip.req.RequirementSet` object
reported by pip into a list of :class:`pip_accel.req.Requirement`
objects. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L508-L539 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.install_requirements | def install_requirements(self, requirements, **kw):
"""
Manually install a requirement set from binary and/or wheel distributions.
:param requirements: A list of :class:`pip_accel.req.Requirement` objects.
:param kw: Any keyword arguments are passed on to
:func:`~pip_accel.bdist.BinaryDistributionManager.install_binary_dist()`.
:returns: The number of packages that were just installed (an integer).
"""
install_timer = Timer()
install_types = []
if any(not req.is_wheel for req in requirements):
install_types.append('binary')
if any(req.is_wheel for req in requirements):
install_types.append('wheel')
logger.info("Installing from %s distributions ..", concatenate(install_types))
# Track installed files by default (unless the caller specifically opted out).
kw.setdefault('track_installed_files', True)
num_installed = 0
for requirement in requirements:
# When installing setuptools we need to uninstall distribute,
# otherwise distribute will shadow setuptools and all sorts of
# strange issues can occur (e.g. upgrading to the latest
# setuptools to gain wheel support and then having everything
# blow up because distribute doesn't know about wheels).
if requirement.name == 'setuptools' and is_installed('distribute'):
uninstall('distribute')
if requirement.is_editable:
logger.debug("Installing %s in editable form using pip.", requirement)
with TransactionalUpdate(requirement):
command = InstallCommand()
opts, args = command.parse_args(['--no-deps', '--editable', requirement.source_directory])
command.run(opts, args)
elif requirement.is_wheel:
logger.info("Installing %s wheel distribution using pip ..", requirement)
with TransactionalUpdate(requirement):
wheel_version = pip_wheel_module.wheel_version(requirement.source_directory)
pip_wheel_module.check_compatibility(wheel_version, requirement.name)
requirement.pip_requirement.move_wheel_files(requirement.source_directory)
else:
logger.info("Installing %s binary distribution using pip-accel ..", requirement)
with TransactionalUpdate(requirement):
binary_distribution = self.bdists.get_binary_dist(requirement)
self.bdists.install_binary_dist(binary_distribution, **kw)
num_installed += 1
logger.info("Finished installing %s in %s.",
pluralize(num_installed, "requirement"),
install_timer)
return num_installed | python | def install_requirements(self, requirements, **kw):
"""
Manually install a requirement set from binary and/or wheel distributions.
:param requirements: A list of :class:`pip_accel.req.Requirement` objects.
:param kw: Any keyword arguments are passed on to
:func:`~pip_accel.bdist.BinaryDistributionManager.install_binary_dist()`.
:returns: The number of packages that were just installed (an integer).
"""
install_timer = Timer()
install_types = []
if any(not req.is_wheel for req in requirements):
install_types.append('binary')
if any(req.is_wheel for req in requirements):
install_types.append('wheel')
logger.info("Installing from %s distributions ..", concatenate(install_types))
# Track installed files by default (unless the caller specifically opted out).
kw.setdefault('track_installed_files', True)
num_installed = 0
for requirement in requirements:
# When installing setuptools we need to uninstall distribute,
# otherwise distribute will shadow setuptools and all sorts of
# strange issues can occur (e.g. upgrading to the latest
# setuptools to gain wheel support and then having everything
# blow up because distribute doesn't know about wheels).
if requirement.name == 'setuptools' and is_installed('distribute'):
uninstall('distribute')
if requirement.is_editable:
logger.debug("Installing %s in editable form using pip.", requirement)
with TransactionalUpdate(requirement):
command = InstallCommand()
opts, args = command.parse_args(['--no-deps', '--editable', requirement.source_directory])
command.run(opts, args)
elif requirement.is_wheel:
logger.info("Installing %s wheel distribution using pip ..", requirement)
with TransactionalUpdate(requirement):
wheel_version = pip_wheel_module.wheel_version(requirement.source_directory)
pip_wheel_module.check_compatibility(wheel_version, requirement.name)
requirement.pip_requirement.move_wheel_files(requirement.source_directory)
else:
logger.info("Installing %s binary distribution using pip-accel ..", requirement)
with TransactionalUpdate(requirement):
binary_distribution = self.bdists.get_binary_dist(requirement)
self.bdists.install_binary_dist(binary_distribution, **kw)
num_installed += 1
logger.info("Finished installing %s in %s.",
pluralize(num_installed, "requirement"),
install_timer)
return num_installed | Manually install a requirement set from binary and/or wheel distributions.
:param requirements: A list of :class:`pip_accel.req.Requirement` objects.
:param kw: Any keyword arguments are passed on to
:func:`~pip_accel.bdist.BinaryDistributionManager.install_binary_dist()`.
:returns: The number of packages that were just installed (an integer). | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L541-L589 |
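A hedged sketch of the two-phase flow that `get_requirements()` and `install_requirements()` implement, mirroring `install_from_arguments()` earlier in this file; the import location of `Config` and the pinned requirement are placeholders:
```
from pip_accel import PipAccelerator
from pip_accel.config import Config  # assumed module location

accelerator = PipAccelerator(Config())
try:
    requirements = accelerator.get_requirements(['requests==2.6.0'])
    if requirements:
        accelerator.install_requirements(requirements)
finally:
    # Mirrors the finally clause in install_from_arguments().
    accelerator.cleanup_temporary_directories()
```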
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.clear_build_directory | def clear_build_directory(self):
"""Clear the build directory where pip unpacks the source distribution archives."""
stat = os.stat(self.build_directory)
shutil.rmtree(self.build_directory)
os.makedirs(self.build_directory, stat.st_mode) | python | def clear_build_directory(self):
"""Clear the build directory where pip unpacks the source distribution archives."""
stat = os.stat(self.build_directory)
shutil.rmtree(self.build_directory)
os.makedirs(self.build_directory, stat.st_mode) | Clear the build directory where pip unpacks the source distribution archives. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L612-L616 |
paylogic/pip-accel | pip_accel/__init__.py | PipAccelerator.cleanup_temporary_directories | def cleanup_temporary_directories(self):
"""Delete the build directories and any temporary directories created by pip."""
while self.build_directories:
shutil.rmtree(self.build_directories.pop())
for requirement in self.reported_requirements:
requirement.remove_temporary_source()
while self.eggs_links:
symbolic_link = self.eggs_links.pop()
if os.path.islink(symbolic_link):
os.unlink(symbolic_link) | python | def cleanup_temporary_directories(self):
"""Delete the build directories and any temporary directories created by pip."""
while self.build_directories:
shutil.rmtree(self.build_directories.pop())
for requirement in self.reported_requirements:
requirement.remove_temporary_source()
while self.eggs_links:
symbolic_link = self.eggs_links.pop()
if os.path.islink(symbolic_link):
os.unlink(symbolic_link) | Delete the build directories and any temporary directories created by pip. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L618-L627 |
paylogic/pip-accel | pip_accel/__init__.py | DownloadLogFilter.filter | def filter(self, record):
"""Change the severity of selected log records."""
if isinstance(record.msg, basestring):
message = record.msg.lower()
if all(kw in message for kw in self.KEYWORDS):
record.levelname = 'DEBUG'
record.levelno = logging.DEBUG
return 1 | python | def filter(self, record):
"""Change the severity of selected log records."""
if isinstance(record.msg, basestring):
message = record.msg.lower()
if all(kw in message for kw in self.KEYWORDS):
record.levelname = 'DEBUG'
record.levelno = logging.DEBUG
return 1 | Change the severity of selected log records. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/__init__.py#L662-L669 |
paylogic/pip-accel | pip_accel/cli.py | main | def main():
"""The command line interface for the ``pip-accel`` program."""
arguments = sys.argv[1:]
# If no arguments are given, the help text of pip-accel is printed.
if not arguments:
usage()
sys.exit(0)
# If no install subcommand is given we pass the command line straight
# to pip without any changes and exit immediately afterwards.
if 'install' not in arguments:
# This will not return.
os.execvp('pip', ['pip'] + arguments)
else:
arguments = [arg for arg in arguments if arg != 'install']
config = Config()
# Initialize logging output.
coloredlogs.install(
fmt=config.log_format,
level=config.log_verbosity,
)
# Adjust verbosity based on -v, -q, --verbose, --quiet options.
for argument in list(arguments):
if match_option(argument, '-v', '--verbose'):
coloredlogs.increase_verbosity()
elif match_option(argument, '-q', '--quiet'):
coloredlogs.decrease_verbosity()
# Perform the requested action(s).
try:
accelerator = PipAccelerator(config)
accelerator.install_from_arguments(arguments)
except NothingToDoError as e:
# Don't print a traceback for this (it's not very user friendly) and
# exit with status zero to stay compatible with pip. For more details
# please refer to https://github.com/paylogic/pip-accel/issues/47.
logger.warning("%s", e)
sys.exit(0)
except Exception:
logger.exception("Caught unhandled exception!")
sys.exit(1) | python | def main():
"""The command line interface for the ``pip-accel`` program."""
arguments = sys.argv[1:]
# If no arguments are given, the help text of pip-accel is printed.
if not arguments:
usage()
sys.exit(0)
# If no install subcommand is given we pass the command line straight
# to pip without any changes and exit immediately afterwards.
if 'install' not in arguments:
# This will not return.
os.execvp('pip', ['pip'] + arguments)
else:
arguments = [arg for arg in arguments if arg != 'install']
config = Config()
# Initialize logging output.
coloredlogs.install(
fmt=config.log_format,
level=config.log_verbosity,
)
# Adjust verbosity based on -v, -q, --verbose, --quiet options.
for argument in list(arguments):
if match_option(argument, '-v', '--verbose'):
coloredlogs.increase_verbosity()
elif match_option(argument, '-q', '--quiet'):
coloredlogs.decrease_verbosity()
# Perform the requested action(s).
try:
accelerator = PipAccelerator(config)
accelerator.install_from_arguments(arguments)
except NothingToDoError as e:
# Don't print a traceback for this (it's not very user friendly) and
# exit with status zero to stay compatible with pip. For more details
# please refer to https://github.com/paylogic/pip-accel/issues/47.
logger.warning("%s", e)
sys.exit(0)
except Exception:
logger.exception("Caught unhandled exception!")
sys.exit(1) | The command line interface for the ``pip-accel`` program. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/cli.py#L28-L66 |
paylogic/pip-accel | pip_accel/caches/local.py | LocalCacheBackend.get | def get(self, filename):
"""
Check if a distribution archive exists in the local cache.
:param filename: The filename of the distribution archive (a string).
:returns: The pathname of a distribution archive on the local file
system or :data:`None`.
"""
pathname = os.path.join(self.config.binary_cache, filename)
if os.path.isfile(pathname):
logger.debug("Distribution archive exists in local cache (%s).", pathname)
return pathname
else:
logger.debug("Distribution archive doesn't exist in local cache (%s).", pathname)
return None | python | def get(self, filename):
"""
Check if a distribution archive exists in the local cache.
:param filename: The filename of the distribution archive (a string).
:returns: The pathname of a distribution archive on the local file
system or :data:`None`.
"""
pathname = os.path.join(self.config.binary_cache, filename)
if os.path.isfile(pathname):
logger.debug("Distribution archive exists in local cache (%s).", pathname)
return pathname
else:
logger.debug("Distribution archive doesn't exist in local cache (%s).", pathname)
return None | Check if a distribution archive exists in the local cache.
:param filename: The filename of the distribution archive (a string).
:returns: The pathname of a distribution archive on the local file
system or :data:`None`. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/local.py#L40-L54 |
paylogic/pip-accel | pip_accel/caches/local.py | LocalCacheBackend.put | def put(self, filename, handle):
"""
Store a distribution archive in the local cache.
:param filename: The filename of the distribution archive (a string).
:param handle: A file-like object that provides access to the
distribution archive.
"""
file_in_cache = os.path.join(self.config.binary_cache, filename)
logger.debug("Storing distribution archive in local cache: %s", file_in_cache)
makedirs(os.path.dirname(file_in_cache))
# Stream the contents of the distribution archive to a temporary file
# to avoid race conditions (e.g. partial reads) between multiple
# processes that are using the local cache at the same time.
with AtomicReplace(file_in_cache) as temporary_file:
with open(temporary_file, 'wb') as temporary_file_handle:
shutil.copyfileobj(handle, temporary_file_handle)
logger.debug("Finished caching distribution archive in local cache.") | python | def put(self, filename, handle):
"""
Store a distribution archive in the local cache.
:param filename: The filename of the distribution archive (a string).
:param handle: A file-like object that provides access to the
distribution archive.
"""
file_in_cache = os.path.join(self.config.binary_cache, filename)
logger.debug("Storing distribution archive in local cache: %s", file_in_cache)
makedirs(os.path.dirname(file_in_cache))
# Stream the contents of the distribution archive to a temporary file
# to avoid race conditions (e.g. partial reads) between multiple
# processes that are using the local cache at the same time.
with AtomicReplace(file_in_cache) as temporary_file:
with open(temporary_file, 'wb') as temporary_file_handle:
shutil.copyfileobj(handle, temporary_file_handle)
logger.debug("Finished caching distribution archive in local cache.") | Store a distribution archive in the local cache.
:param filename: The filename of the distribution archive (a string).
:param handle: A file-like object that provides access to the
distribution archive. | https://github.com/paylogic/pip-accel/blob/ccad1b784927a322d996db593403b1d2d2e22666/pip_accel/caches/local.py#L56-L73 |
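A roundtrip sketch for the two cache methods above, assuming the backend is constructed with a `Config` object and that `Config` lives in `pip_accel.config`; the filename and archive bytes are fake:
```
import io
from pip_accel.config import Config  # assumed module location
from pip_accel.caches.local import LocalCacheBackend

cache = LocalCacheBackend(Config())
cache.put('pep8-1.6.2.tar.gz', io.BytesIO(b'fake archive bytes'))
assert cache.get('pep8-1.6.2.tar.gz') is not None   # now cached locally
assert cache.get('no-such-archive.tar.gz') is None  # cache miss
```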
fle/django-multi-email-field | multi_email_field/forms.py | MultiEmailField.to_python | def to_python(self, value):
"Normalize data to a list of strings."
# Return an empty list if no input was given.
if not value:
return []
return [v.strip() for v in value.splitlines() if v != ""] | python | def to_python(self, value):
"Normalize data to a list of strings."
# Return an empty list if no input was given.
if not value:
return []
return [v.strip() for v in value.splitlines() if v != ""] | Normalize data to a list of strings. | https://github.com/fle/django-multi-email-field/blob/5488ab91053b8f7ed6c36a07c28d56efe85b1daf/multi_email_field/forms.py#L14-L19 |
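A pure-Python illustration of the normalization rule above; it mirrors the list comprehension rather than instantiating the Django field:
```
def normalize(value):
    # Empty input (None or '') normalizes to an empty list.
    if not value:
        return []
    # Split on line breaks, drop empty lines, strip the rest.
    return [v.strip() for v in value.splitlines() if v != ""]

assert normalize('foo@example.com\n bar@example.com\n\n') == [
    'foo@example.com', 'bar@example.com']
assert normalize('') == []
assert normalize(None) == []
```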
fle/django-multi-email-field | multi_email_field/forms.py | MultiEmailField.validate | def validate(self, value):
"Check if value consists only of valid emails."
# Use the parent's handling of required fields, etc.
super(MultiEmailField, self).validate(value)
try:
for email in value:
validate_email(email)
except ValidationError:
raise ValidationError(self.message, code=self.code) | python | def validate(self, value):
"Check if value consists only of valid emails."
# Use the parent's handling of required fields, etc.
super(MultiEmailField, self).validate(value)
try:
for email in value:
validate_email(email)
except ValidationError:
raise ValidationError(self.message, code=self.code) | Check if value consists only of valid emails. | https://github.com/fle/django-multi-email-field/blob/5488ab91053b8f7ed6c36a07c28d56efe85b1daf/multi_email_field/forms.py#L21-L30 |
fle/django-multi-email-field | multi_email_field/widgets.py | MultiEmailWidget.prep_value | def prep_value(self, value):
""" Prepare value before effectively render widget """
if value in MULTI_EMAIL_FIELD_EMPTY_VALUES:
return ""
elif isinstance(value, six.string_types):
return value
elif isinstance(value, list):
return "\n".join(value)
raise ValidationError('Invalid format.') | python | def prep_value(self, value):
""" Prepare value before effectively render widget """
if value in MULTI_EMAIL_FIELD_EMPTY_VALUES:
return ""
elif isinstance(value, six.string_types):
return value
elif isinstance(value, list):
return "\n".join(value)
raise ValidationError('Invalid format.') | Prepare the value before the widget is rendered. | https://github.com/fle/django-multi-email-field/blob/5488ab91053b8f7ed6c36a07c28d56efe85b1daf/multi_email_field/widgets.py#L14-L22 |
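The widget method above accepts three input shapes; a hedged illustration, assuming `MULTI_EMAIL_FIELD_EMPTY_VALUES` contains at least `None` and the empty string, run in an environment where the module imports:
```
from multi_email_field.widgets import MultiEmailWidget

widget = MultiEmailWidget()
widget.prep_value(None)                    # -> ''
widget.prep_value('a@x.com\nb@x.com')      # strings pass through unchanged
widget.prep_value(['a@x.com', 'b@x.com'])  # -> 'a@x.com\nb@x.com'
widget.prep_value(42)                      # raises ValidationError
```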
ElementAI/greensim | greensim/__init__.py | pause | def pause() -> None:
"""
Pauses the current process indefinitely -- it will require another process to `resume()` it. When this resumption
happens, the process returns from this function.
"""
if _logger is not None:
_log(INFO, "Process", local.name, "pause")
Process.current().rsim()._gr.switch() | python | def pause() -> None:
"""
Pauses the current process indefinitely -- it will require another process to `resume()` it. When this resumption
happens, the process returns from this function.
"""
if _logger is not None:
_log(INFO, "Process", local.name, "pause")
Process.current().rsim()._gr.switch() | Pauses the current process indefinitely -- it will require another process to `resume()` it. When this resumption
happens, the process returns from this function. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L552-L559 |
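A hedged sketch of the pause/resume handshake the docstring describes; it assumes the module-level greensim API shown elsewhere in this file, that `Simulator.add()` accepts extra positional arguments and returns a process handle, and that this handle exposes the `resume()` method the docstring mentions:
```
from greensim import Simulator, advance, now, pause

sim = Simulator()
log = []

def sleeper():
    pause()             # parked until another process resumes us
    log.append(now())

def waker(sleeping_process):
    advance(3.0)
    sleeping_process.resume()

process = sim.add(sleeper)
sim.add(waker, process)
sim.run()
# Expect: log == [3.0]
```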
ElementAI/greensim | greensim/__init__.py | advance | def advance(delay: float) -> None:
"""
Pauses the current process for the given delay (in simulated time). The process will be resumed when the simulation
has advanced to the moment corresponding to `now() + delay`.
"""
if _logger is not None:
_log(INFO, "Process", local.name, "advance", delay=delay)
curr = Process.current()
rsim = curr.rsim
id_wakeup = rsim()._schedule(delay, curr.switch) # type: ignore
try:
rsim()._gr.switch() # type: ignore
except Interrupt:
rsim()._cancel(id_wakeup) # type: ignore
raise | python | def advance(delay: float) -> None:
"""
Pauses the current process for the given delay (in simulated time). The process will be resumed when the simulation
has advanced to the moment corresponding to `now() + delay`.
"""
if _logger is not None:
_log(INFO, "Process", local.name, "advance", delay=delay)
curr = Process.current()
rsim = curr.rsim
id_wakeup = rsim()._schedule(delay, curr.switch) # type: ignore
try:
rsim()._gr.switch() # type: ignore
except Interrupt:
rsim()._cancel(id_wakeup) # type: ignore
raise | Pauses the current process for the given delay (in simulated time). The process will be resumed when the simulation
has advanced to the moment corresponding to `now() + delay`. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L562-L577 |
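Minimal use of `advance()`, in the style of the docstring example for `happens` further down; `Simulator.add()` is assumed to behave like the module-level `add()`:
```
from greensim import Simulator, advance, now

sim = Simulator()
timestamps = []

def worker():
    advance(1.5)               # resumes at t = 1.5
    timestamps.append(now())
    advance(1.5)               # resumes at t = 3.0
    timestamps.append(now())

sim.add(worker)
sim.run()
# Expect: timestamps == [1.5, 3.0]
```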
ElementAI/greensim | greensim/__init__.py | happens | def happens(intervals: Iterable[float], name: Optional[str] = None) -> Callable:
"""
Decorator used to set up a process that adds a new instance of another process at intervals dictated by the given
sequence (which may be infinite).
Example: the following program runs the process named `my_process` 5 times, each run spaced by 2.0 time units.
```
from itertools import repeat
sim = Simulator()
log = []
@happens(repeat(2.0, 5))
def my_process(the_log):
the_log.append(now())
sim.add(my_process, log)
sim.run()
print(str(log)) # Expect: [2.0, 4.0, 6.0, 8.0, 10.0]
```
"""
def hook(event: Callable):
def make_happen(*args_event: Any, **kwargs_event: Any) -> None:
if name is not None:
local.name = cast(str, name)
for interval in intervals:
advance(interval)
add(event, *args_event, **kwargs_event)
return make_happen
return hook | python | def happens(intervals: Iterable[float], name: Optional[str] = None) -> Callable:
"""
Decorator used to set up a process that adds a new instance of another process at intervals dictated by the given
sequence (which may be infinite).
Example: the following program runs the process named `my_process` 5 times, each run spaced by 2.0 time units.
```
from itertools import repeat
sim = Simulator()
log = []
@happens(repeat(2.0, 5))
def my_process(the_log):
the_log.append(now())
sim.add(my_process, log)
sim.run()
print(str(log)) # Expect: [2.0, 4.0, 6.0, 8.0, 10.0]
```
"""
def hook(event: Callable):
def make_happen(*args_event: Any, **kwargs_event: Any) -> None:
if name is not None:
local.name = cast(str, name)
for interval in intervals:
advance(interval)
add(event, *args_event, **kwargs_event)
return make_happen
return hook | Decorator used to set up a process that adds a new instance of another process at intervals dictated by the given
sequence (which may be infinite).
Example: the following program runs the process named `my_process` 5 times, each run spaced by 2.0 time units.
```
from itertools import repeat
sim = Simulator()
log = []
@happens(repeat(2.0, 5))
def my_process(the_log):
the_log.append(now())
sim.add(my_process, log)
sim.run()
print(str(log)) # Expect: [2.0, 4.0, 6.0, 8.0, 10.0]
``` | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L606-L637 |
ElementAI/greensim | greensim/__init__.py | tagged | def tagged(*tags: Tags) -> Callable:
"""
Decorator for adding a label to the process.
These labels are applied to any child Processes produced by the event.
"""
global GREENSIM_TAG_ATTRIBUTE
def hook(event: Callable):
def wrapper(*args, **kwargs):
event(*args, **kwargs)
setattr(wrapper, GREENSIM_TAG_ATTRIBUTE, tags)
return wrapper
return hook | python | def tagged(*tags: Tags) -> Callable:
"""
Decorator for adding a label to the process.
These labels are applied to any child Processes produced by the event.
"""
global GREENSIM_TAG_ATTRIBUTE
def hook(event: Callable):
def wrapper(*args, **kwargs):
event(*args, **kwargs)
setattr(wrapper, GREENSIM_TAG_ATTRIBUTE, tags)
return wrapper
return hook | Decorator for adding a label to the process.
These labels are applied to any child Processes produced by the event. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L640-L651 |
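A sketch of applying the decorator above; the `Tags` member used here is hypothetical and the import location of the `Tags` type is an assumption:
```
from greensim import Simulator, tagged
from greensim.tags import Tags  # assumed location of the Tags type

@tagged(Tags.MALWARE)  # hypothetical tag value
def tagged_process():
    pass

sim = Simulator()
sim.add(tagged_process)  # child processes inherit the tags
sim.run()
```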
ElementAI/greensim | greensim/__init__.py | select | def select(*signals: Signal, **kwargs) -> List[Signal]:
"""
Allows the current process to wait for multiple concurrent signals. Waits until at least one of the signals turns
on, at which point the signals that are then on are returned.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
stops waiting on the set of :py:class:`Signal`s. In such a situation, a :py:class:`Timeout` exception is raised
on the process.
"""
class CleanUp(Interrupt):
pass
timeout = kwargs.get("timeout", None)
if not isinstance(timeout, (float, int, type(None))):
raise ValueError("The timeout keyword parameter can be either None or a number.")
def wait_one(signal: Signal, common: Signal) -> None:
try:
signal.wait()
common.turn_on()
except CleanUp:
pass
# We set up multiple sub-processes, each waiting for one of the signals. Once one of them has fired,
# the remaining waiters are interrupted in the finally clause below, so none of them linger.
common = Signal(name=local.name + "-selector").turn_off()
if _logger is not None:
_log(INFO, "select", "select", "select", signals=[sig.name for sig in signals])
procs = []
for signal in signals:
procs.append(add(wait_one, signal, common))
try:
common.wait(timeout)
finally:
for proc in procs: # Clean up the support processes.
proc.interrupt(CleanUp())
return [signal for signal in signals if signal.is_on] | python | def select(*signals: Signal, **kwargs) -> List[Signal]:
"""
Allows the current process to wait for multiple concurrent signals. Waits until at least one of the signals turns
on, at which point the signals that are then on are returned.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
stops waiting on the set of :py:class:`Signal`s. In such a situation, a :py:class:`Timeout` exception is raised
on the process.
"""
class CleanUp(Interrupt):
pass
timeout = kwargs.get("timeout", None)
if not isinstance(timeout, (float, int, type(None))):
raise ValueError("The timeout keyword parameter can be either None or a number.")
def wait_one(signal: Signal, common: Signal) -> None:
try:
signal.wait()
common.turn_on()
except CleanUp:
pass
# We set up multiple sub-processes, each waiting for one of the signals. Once one of them has fired,
# the remaining waiters are interrupted in the finally clause below, so none of them linger.
common = Signal(name=local.name + "-selector").turn_off()
if _logger is not None:
_log(INFO, "select", "select", "select", signals=[sig.name for sig in signals])
procs = []
for signal in signals:
procs.append(add(wait_one, signal, common))
try:
common.wait(timeout)
finally:
for proc in procs: # Clean up the support processes.
proc.interrupt(CleanUp())
return [signal for signal in signals if signal.is_on] | Allows the current process to wait for multiple concurrent signals. Waits until at least one of the signals turns
on, at which point the signals that are then on are returned.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
stops waiting on the set of :py:class:`Signal`s. In such a situation, a :py:class:`Timeout` exception is raised
on the process. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L834-L873 |
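A sketch of waiting on several signals with a timeout, using names referenced in the row above (`Signal`, `select`, and the `Timeout` exception its docstring mentions, all assumed importable from greensim); the delays are illustrative:
```
from greensim import Signal, Simulator, Timeout, advance, select

sim = Simulator()

def worker(first, second):
    try:
        ready = select(first, second, timeout=10.0)
        print([signal.name for signal in ready])  # -> ['first']
    except Timeout:
        print("neither signal turned on in time")

def trigger(signal):
    advance(2.0)
    signal.turn_on()

# Creating signals outside a running process is assumed to be allowed.
first = Signal(name="first").turn_off()
second = Signal(name="second").turn_off()
sim.add(worker, first, second)
sim.add(trigger, first)
sim.run()
```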
ElementAI/greensim | greensim/__init__.py | Simulator.events | def events(self) -> Iterable[Tuple[Optional[float], Callable, Sequence[Any], Mapping[str, Any]]]:
"""
Iterates over scheduled events. Each event is a 4-tuple composed of the moment (on the simulated clock) the
event should execute, the function corresponding to the event, its positional parameters (as a tuple of
arbitrary length), and its keyword parameters (as a dictionary).
"""
return (
(event.timestamp, event.fn, event.args, event.kwargs)
for event in self._events
if not event.is_cancelled
) | python | def events(self) -> Iterable[Tuple[Optional[float], Callable, Sequence[Any], Mapping[str, Any]]]:
"""
Iterates over scheduled events. Each event is a 4-tuple composed of the moment (on the simulated clock) the
event should execute, the function corresponding to the event, its positional parameters (as a tuple of
arbitrary length), and its keyword parameters (as a dictionary).
"""
return (
(event.timestamp, event.fn, event.args, event.kwargs)
for event in self._events
if not event.is_cancelled
) | Iterates over scheduled events. Each event is a 4-tuple composed of the moment (on the simulated clock) the
event should execute, the function corresponding to the event, its positional parameters (as a tuple of
arbitrary length), and its keyword parameters (as a dictionary). | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L220-L230 |
ElementAI/greensim | greensim/__init__.py | Simulator._schedule | def _schedule(self, delay: float, event: Callable, *args: Any, **kwargs: Any) -> int:
"""
Schedules a one-time event to be run along the simulation. The event is scheduled relative to current simulator
time, so delay is expected to be a positive simulation time interval. The `event` parameter corresponds to a
callable object (e.g. a function): it will be called so as to "execute" the event, with the positional and
keyword parameters that follow `event` in the call to `_schedule()` (note that the value of these arguments are
evaluated when `_schedule()` is called, not when the event is executed). Once this event function returns, the
simulation carries on to the next event, or stops if none remain.
Remark that this method is private, and is meant for internal usage by the :py:class:`Simulator` and
:py:class:`Process` classes, and helper functions of this module.
:return: Unique identifier for the scheduled event.
"""
if _logger is not None:
self._log(
DEBUG,
"schedule",
delay=delay,
fn=event,
args=args,
kwargs=kwargs,
counter=self._counter,
__now=self.now()
)
delay = float(delay)
if delay < 0.0:
raise ValueError("Delay must be positive.")
# Use counter to strictly order events happening at the same simulated time. This gives a total order on events,
# working around the heap queue not yielding a stable ordering.
id_event = self._counter
heappush(self._events, _Event(self._ts_now + delay, id_event, event, *args, **kwargs))
self._counter += 1
return id_event | python | def _schedule(self, delay: float, event: Callable, *args: Any, **kwargs: Any) -> int:
"""
Schedules a one-time event to be run along the simulation. The event is scheduled relative to current simulator
time, so delay is expected to be a positive simulation time interval. The `event` parameter corresponds to a
callable object (e.g. a function): it will be called so as to "execute" the event, with the positional and
keyword parameters that follow `event` in the call to `_schedule()` (note that the value of these arguments are
evaluated when `_schedule()` is called, not when the event is executed). Once this event function returns, the
simulation carries on to the next event, or stops if none remain.
Remark that this method is private, and is meant for internal usage by the :py:class:`Simulator` and
:py:class:`Process` classes, and helper functions of this module.
:return: Unique identifier for the scheduled event.
"""
if _logger is not None:
self._log(
DEBUG,
"schedule",
delay=delay,
fn=event,
args=args,
kwargs=kwargs,
counter=self._counter,
__now=self.now()
)
delay = float(delay)
if delay < 0.0:
raise ValueError("Delay must be positive.")
# Use counter to strictly order events happening at the same simulated time. This gives a total order on events,
# working around the heap queue not yielding a stable ordering.
id_event = self._counter
heappush(self._events, _Event(self._ts_now + delay, id_event, event, *args, **kwargs))
self._counter += 1
return id_event | Schedules a one-time event to be run along the simulation. The event is scheduled relative to current simulator
time, so delay is expected to be a positive simulation time interval. The `event` parameter corresponds to a
callable object (e.g. a function): it will be called so as to "execute" the event, with the positional and
keyword parameters that follow `event` in the call to `_schedule()` (note that the value of these arguments are
evaluated when `_schedule()` is called, not when the event is executed). Once this event function returns, the
simulation carries on to the next event, or stops if none remain.
Remark that this method is private, and is meant for internal usage by the :py:class:`Simulator` and
:py:class:`Process` classes, and helper functions of this module.
:return: Unique identifier for the scheduled event. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L232-L266 |
ElementAI/greensim | greensim/__init__.py | Simulator._cancel | def _cancel(self, id_cancel) -> None:
"""
Cancels a previously scheduled event. This method is private, and is meant for internal usage by the
:py:class:`Simulator` and :py:class:`Process` classes, and helper functions of this module.
"""
if _logger is not None:
self._log(DEBUG, "cancel", id=id_cancel)
for event in self._events:
if event.identifier == id_cancel:
event.cancel()
break | python | def _cancel(self, id_cancel) -> None:
"""
Cancels a previously scheduled event. This method is private, and is meant for internal usage by the
:py:class:`Simulator` and :py:class:`Process` classes, and helper functions of this module.
"""
if _logger is not None:
self._log(DEBUG, "cancel", id=id_cancel)
for event in self._events:
if event.identifier == id_cancel:
event.cancel()
break | Cancels a previously scheduled event. This method is private, and is meant for internal usage by the
:py:class:`Simulator` and :py:class:`Process` classes, and helper functions of this module. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L268-L278 |
ElementAI/greensim | greensim/__init__.py | Simulator.add | def add(self, fn_process: Callable, *args: Any, **kwargs: Any) -> 'Process':
"""
Adds a process to the simulation. The process is embodied by a function, which will be called with the given
positional and keyword parameters when the simulation runs. As a process, this function runs on a special green
thread, and thus will be able to call functions `now()`, `advance()`, `pause()` and `stop()` to articulate its
events across the simulated timeline and control the simulation's flow.
"""
return self.add_in(0.0, fn_process, *args, **kwargs) | python | def add(self, fn_process: Callable, *args: Any, **kwargs: Any) -> 'Process':
"""
Adds a process to the simulation. The process is embodied by a function, which will be called with the given
positional and keyword parameters when the simulation runs. As a process, this function runs on a special green
thread, and thus will be able to call functions `now()`, `advance()`, `pause()` and `stop()` to articulate its
events across the simulated timeline and control the simulation's flow.
"""
return self.add_in(0.0, fn_process, *args, **kwargs) | Adds a process to the simulation. The process is embodied by a function, which will be called with the given
positional and keyword parameters when the simulation runs. As a process, this function runs on a special green
thread, and thus will be able to call functions `now()`, `advance()`, `pause()` and `stop()` to articulate its
events across the simulated timeline and control the simulation's flow. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L280-L287 |
ElementAI/greensim | greensim/__init__.py | Simulator.add_in | def add_in(self, delay: float, fn_process: Callable, *args: Any, **kwargs: Any) -> 'Process':
"""
Adds a process to the simulation, which is made to start after the given delay in simulated time.
See method add() for more details.
"""
process = Process(self, fn_process, self._gr)
if _logger is not None:
self._log(INFO, "add", __now=self.now(), fn=fn_process, args=args, kwargs=kwargs)
self._schedule(delay, process.switch, *args, **kwargs)
return process | python | def add_in(self, delay: float, fn_process: Callable, *args: Any, **kwargs: Any) -> 'Process':
"""
Adds a process to the simulation, which is made to start after the given delay in simulated time.
See method add() for more details.
"""
process = Process(self, fn_process, self._gr)
if _logger is not None:
self._log(INFO, "add", __now=self.now(), fn=fn_process, args=args, kwargs=kwargs)
self._schedule(delay, process.switch, *args, **kwargs)
return process | Adds a process to the simulation, which is made to start after the given delay in simulated time.
See method add() for more details. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L289-L299 |
ElementAI/greensim | greensim/__init__.py | Simulator.add_at | def add_at(self, moment: float, fn_process: Callable, *args: Any, **kwargs: Any) -> 'Process':
"""
Adds a process to the simulation, which is made to start at the given exact time on the simulated clock. Note
that times in the past when compared to the current moment on the simulated clock are forbidden.
See method add() for more details.
"""
delay = moment - self.now()
if delay < 0.0:
raise ValueError(
f"The given moment to start the process ({moment:f}) is in the past (now is {self.now():f})."
)
return self.add_in(delay, fn_process, *args, **kwargs) | python | def add_at(self, moment: float, fn_process: Callable, *args: Any, **kwargs: Any) -> 'Process':
"""
Adds a process to the simulation, which is made to start at the given exact time on the simulated clock. Note
that times in the past when compared to the current moment on the simulated clock are forbidden.
See method add() for more details.
"""
delay = moment - self.now()
if delay < 0.0:
raise ValueError(
f"The given moment to start the process ({moment:f}) is in the past (now is {self.now():f})."
)
return self.add_in(delay, fn_process, *args, **kwargs) | Adds a process to the simulation, which is made to start at the given exact time on the simulated clock. Note
that times in the past when compared to the current moment on the simulated clock are forbidden.
See method add() for more details. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L301-L313 |
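A sketch contrasting the three scheduling entry points (assuming `now` and `advance` are the module-level helpers the `add` docstring refers to):

```python
from greensim import Simulator, advance, now

def hello(tag):
    print(f"[{now():5.1f}] {tag} starts")
    advance(5.0)
    print(f"[{now():5.1f}] {tag} done")

sim = Simulator()
sim.add(hello, "immediate")          # starts at t = 0.0
sim.add_in(3.0, hello, "relative")   # starts 3.0 time units from now
sim.add_at(10.0, hello, "absolute")  # starts exactly at t = 10.0
sim.run()
```

Per the check above, `add_at` raises `ValueError` for any moment earlier than `sim.now()`.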
ElementAI/greensim | greensim/__init__.py | Simulator.run | def run(self, duration: float = inf) -> None:
"""
Runs the simulation until a stopping condition is met (no more events, or an event invokes method stop()), or
until the simulated clock hits the given duration.
"""
if _logger is not None:
self._log(INFO, "run", __now=self.now(), duration=duration)
counter_stop_event = None
if duration != inf:
counter_stop_event = self._counter
self._schedule(duration, self.stop)
self._is_running = True
while self.is_running and len(self._events) > 0:
event = heappop(self._events)
self._ts_now = event.timestamp or self._ts_now
event.execute(self)
if len(self._events) == 0:
if _logger is not None:
self._log(DEBUG, "out-of-events", __now=self.now())
self.stop()
if counter_stop_event is not None:
# Change the planned stop to a no-op. We would rather eliminate it, but this would force a re-sort of the
# event queue.
for event in self._events:
if event.identifier == counter_stop_event:
if _logger is not None:
self._log(DEBUG, "cancel-stop", counter=counter_stop_event)
event.cancel()
break | python | def run(self, duration: float = inf) -> None:
"""
Runs the simulation until a stopping condition is met (no more events, or an event invokes method stop()), or
until the simulated clock hits the given duration.
"""
if _logger is not None:
self._log(INFO, "run", __now=self.now(), duration=duration)
counter_stop_event = None
if duration != inf:
counter_stop_event = self._counter
self._schedule(duration, self.stop)
self._is_running = True
while self.is_running and len(self._events) > 0:
event = heappop(self._events)
self._ts_now = event.timestamp or self._ts_now
event.execute(self)
if len(self._events) == 0:
if _logger is not None:
self._log(DEBUG, "out-of-events", __now=self.now())
self.stop()
if counter_stop_event is not None:
# Change the planned stop to a no-op. We would rather eliminate it, but this would force a re-sort of the
# event queue.
for event in self._events:
if event.identifier == counter_stop_event:
if _logger is not None:
self._log(DEBUG, "cancel-stop", counter=counter_stop_event)
event.cancel()
break | Runs the simulation until a stopping condition is met (no more events, or an event invokes method stop()), or
until the simulated clock hits the given duration. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L315-L346 |
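A sketch of a bounded run (hypothetical process; as the code above shows, the stop event scheduled for `duration` is cancelled once the run ends):

```python
from greensim import Simulator, advance

def ticker():
    while True:        # an endless process; the run duration bounds it
        advance(1.0)
        print("tick")

sim = Simulator()
sim.add(ticker)
sim.run(duration=25.0)   # run() schedules stop() at t = 25.0
print("clock stopped at", sim.now())
```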
ElementAI/greensim | greensim/__init__.py | Simulator.step | def step(self) -> None:
"""
Runs a single event of the simulation.
"""
event = heappop(self._events)
self._ts_now = event.timestamp or self._ts_now
event.execute(self) | python | def step(self) -> None:
"""
Runs a single event of the simulation.
"""
event = heappop(self._events)
self._ts_now = event.timestamp or self._ts_now
event.execute(self) | Runs a single event of the simulation. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L348-L354 |
ElementAI/greensim | greensim/__init__.py | Simulator.stop | def stop(self) -> None:
"""
Stops the running simulation once the current event is done executing.
"""
if self.is_running:
if _logger is not None:
self._log(INFO, "stop", __now=self.now())
self._is_running = False | python | def stop(self) -> None:
"""
Stops the running simulation once the current event is done executing.
"""
if self.is_running:
if _logger is not None:
self._log(INFO, "stop", __now=self.now())
self._is_running = False | Stops the running simulation once the current event is done executing. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L356-L363 |
ElementAI/greensim | greensim/__init__.py | Simulator._clear | def _clear(self) -> None:
"""
Resets the internal state of the simulator, and sets the simulated clock back to 0.0. This discards all
outstanding events and tears down hanging process instances.
"""
for _, event, _, _ in self.events():
if hasattr(event, "__self__") and isinstance(event.__self__, Process): # type: ignore
event.__self__.throw() # type: ignore
self._events.clear()
self._ts_now = 0.0 | python | def _clear(self) -> None:
"""
Resets the internal state of the simulator, and sets the simulated clock back to 0.0. This discards all
outstanding events and tears down hanging process instances.
"""
for _, event, _, _ in self.events():
if hasattr(event, "__self__") and isinstance(event.__self__, Process): # type: ignore
event.__self__.throw() # type: ignore
self._events.clear()
self._ts_now = 0.0 | Resets the internal state of the simulator, and sets the simulated clock back to 0.0. This discards all
outstanding events and tears down hanging process instances. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L372-L381 |
ElementAI/greensim | greensim/__init__.py | Process._run | def _run(self, *args: Any, **kwargs: Any) -> None:
"""
Wraps around the process body (the function that implements a process within the simulation) so as to catch the
eventual Interrupt that may terminate the process.
"""
try:
self._body(*args, **kwargs)
if _logger is not None:
_log(INFO, "Process", self.local.name, "die-finish")
except Interrupt:
if _logger is not None:
_log(INFO, "Process", self.local.name, "die-interrupt") | python | def _run(self, *args: Any, **kwargs: Any) -> None:
"""
Wraps around the process body (the function that implements a process within the simulation) so as to catch the
eventual Interrupt that may terminate the process.
"""
try:
self._body(*args, **kwargs)
if _logger is not None:
_log(INFO, "Process", self.local.name, "die-finish")
except Interrupt:
if _logger is not None:
_log(INFO, "Process", self.local.name, "die-interrupt") | Wraps around the process body (the function that implements a process within the simulation) so as to catch the
eventual Interrupt that may terminate the process. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L467-L478 |
ElementAI/greensim | greensim/__init__.py | Process._bind_and_call_constructor | def _bind_and_call_constructor(self, t: type, *args) -> None:
"""
Accesses the __init__ method of a type directly and calls it with *args
This allows the constructors of both superclasses to be called, as described in get_binding.md
This could be done using two calls to super() with a hack based on how Python searches __mro__:
```
super().__init__(run, parent) # calls greenlet.greenlet.__init__
super(greenlet.greenlet, self).__init__() # calls TaggedObject.__init__
```
Python will always find greenlet.greenlet first since it is specified first, but will ignore it if it is
the first argument to super, which is meant to indicate the subclass and thus is not meant to be called on
See: https://docs.python.org/3.7/library/functions.html#super
This is indirect, confusing, and not in keeping with the purpose of super(), so the direct method was used
"""
t.__init__.__get__(self)(*args) | python | def _bind_and_call_constructor(self, t: type, *args) -> None:
"""
Accesses the __init__ method of a type directly and calls it with *args
This allows the constructors of both superclasses to be called, as described in get_binding.md
This could be done using two calls to super() with a hack based on how Python searches __mro__:
```
super().__init__(run, parent) # calls greenlet.greenlet.__init__
super(greenlet.greenlet, self).__init__() # calls TaggedObject.__init__
```
Python will always find greenlet.greenlet first since it is specified first, but will ignore it if it is
the first argument to super, which is meant to indicate the subclass and thus is not meant to be called on
See: https://docs.python.org/3.7/library/functions.html#super
This is indirect, confusing, and not in keeping with the purpose of super(), so the direct method was used
"""
t.__init__.__get__(self)(*args) | Accesses the __init__ method of a type directly and calls it with *args
This allows the constructors of both superclasses to be called, as described in get_binding.md
This could be done using two calls to super() with a hack based on how Python searches __mro__:
```
super().__init__(run, parent) # calls greenlet.greenlet.__init__
super(greenlet.greenlet, self).__init__() # calls TaggedObject.__init__
```
Python will always find greenlet.greenlet first since it is specified first, but will ignore it if it is
the first argument to super, which is meant to indicate the subclass and thus is not meant to be called on
See: https://docs.python.org/3.7/library/functions.html#super
This is indirect, confusing, and not in keeping with the purpose of super(), so the direct method was used | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L480-L500
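The same binding trick outside of greensim, as a self-contained sketch (hypothetical classes `A`, `B`, `C`):

```python
class A:
    def __init__(self):
        self.a = "from A"

class B:
    def __init__(self, x):
        self.x = x

class C(A, B):
    def __init__(self):
        # Bind each superclass __init__ to self and call it directly,
        # bypassing the MRO-based dispatch that super() would perform.
        A.__init__.__get__(self)()      # equivalent to A.__init__(self)
        B.__init__.__get__(self)(42)    # equivalent to B.__init__(self, 42)

c = C()
assert c.a == "from A" and c.x == 42
```

`T.__init__.__get__(self)` yields a bound method, so the explicit `self` argument disappears from the call, exactly as in the record above.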
ElementAI/greensim | greensim/__init__.py | Process.current | def current() -> 'Process':
"""
Returns the instance of the process that is executing at the current moment.
"""
curr = greenlet.getcurrent()
if not isinstance(curr, Process):
raise TypeError("Current greenlet does not correspond to a Process instance.")
return cast(Process, greenlet.getcurrent()) | python | def current() -> 'Process':
"""
Returns the instance of the process that is executing at the current moment.
"""
curr = greenlet.getcurrent()
if not isinstance(curr, Process):
raise TypeError("Current greenlet does not correspond to a Process instance.")
return cast(Process, greenlet.getcurrent()) | Returns the instance of the process that is executing at the current moment. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L503-L510 |
ElementAI/greensim | greensim/__init__.py | Process.resume | def resume(self) -> None:
"""
Resumes a process that has been previously paused by invoking function `pause()`. This does not interrupt the
current process or event: it merely schedules again the target process, so that its execution carries on at the
return of the `pause()` function, when this new wake-up event fires.
"""
if _logger is not None:
_log(INFO, "Process", self.local.name, "resume")
self.rsim()._schedule(0.0, self.switch) | python | def resume(self) -> None:
"""
Resumes a process that has been previously paused by invoking function `pause()`. This does not interrupt the
current process or event: it merely schedules again the target process, so that its execution carries on at the
return of the `pause()` function, when this new wake-up event fires.
"""
if _logger is not None:
_log(INFO, "Process", self.local.name, "resume")
self.rsim()._schedule(0.0, self.switch) | Resumes a process that has been previously paused by invoking function `pause()`. This does not interrupt the
current process or event: it merely schedules again the target process, so that its execution carries on at the
return of the `pause()` function, when this new wake-up event fires. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L519-L527 |
ElementAI/greensim | greensim/__init__.py | Process.interrupt | def interrupt(self, inter: Optional[Interrupt] = None) -> None:
"""
Interrupts a process that has been previously :py:meth:`pause`d or made to :py:meth:`advance`, by resuming it
immediately and raising an :py:class:`Interrupt` exception on it. This exception can be captured by the
interrupted process and leveraged for various purposes, such as timing out on a wait or generating activity
prompting immediate reaction.
:param inter:
Exception to raise on the :py:class:`Process`; if ``None`` is given, an instance of :py:class:`Interrupt` is
raised. This allows one to use specialized :py:class:`Interrupt` subclasses so as to implement
non-interfering mixed interruption stacks. For instance, a process may advance towards a certain timeout as
it waits for multiple resources concurrently. Should it hit the timeout, it would :py:meth:`interrupt` the
waiting processes so as to clean up after itself. If these processes have themselves a timeout mechanism of
their own, also based on interrupts, using a subclass can help them distinguish between these and the
clean-up interrupts.
"""
if inter is None:
inter = Interrupt()
if _logger is not None:
_log(INFO, "Process", self.local.name, "interrupt", type=type(inter).__name__)
self.rsim()._schedule(0.0, self.throw, inter) | python | def interrupt(self, inter: Optional[Interrupt] = None) -> None:
"""
Interrupts a process that has been previously :py:meth:`pause`d or made to :py:meth:`advance`, by resuming it
immediately and raising an :py:class:`Interrupt` exception on it. This exception can be captured by the
interrupted process and leveraged for various purposes, such as timing out on a wait or generating activity
prompting immediate reaction.
:param inter:
Exception to raise on the :py:class:`Process`; if ``None`` is given, an instance of :py:class:`Interrupt` is
raised. This allows one to use specialized :py:class:`Interrupt` subclasses so as to implement
non-interfering mixed interruption stacks. For instance, a process may advance towards a certain timeout as
it waits for multiple resources concurrently. Should it hit the timeout, it would :py:meth:`interrupt` the
waiting processes so as to clean up after itself. If these processes have themselves a timeout mechanism of
their own, also based on interrupts, using a subclass can help them distinguish between these and the
clean-up interrupts.
"""
if inter is None:
inter = Interrupt()
if _logger is not None:
_log(INFO, "Process", self.local.name, "interrupt", type=type(inter).__name__)
self.rsim()._schedule(0.0, self.throw, inter) | Interrupts a process that has been previously :py:meth:`pause`d or made to :py:meth:`advance`, by resuming it
immediately and raising an :py:class:`Interrupt` exception on it. This exception can be captured by the
interrupted process and leveraged for various purposes, such as timing out on a wait or generating activity
prompting immediate reaction.
:param inter:
Exception to raise on the :py:class:`Process`; if ``None`` is given, an instance of :py:class:`Interrupt` is
raised. This allows one to use specialized :py:class:`Interrupt` subclasses so as to implement
non-interfering mixed interruption stacks. For instance, a process may advance towards a certain timeout as
it waits for multiple resources concurrently. Should it hit the timeout, it would :py:meth:`interrupt` the
waiting processes so as to clean up after itself. If these processes have themselves a timeout mechanism of
their own, also based on interrupts, using a subclass can help them distinguish between these and the
clean-up interrupts. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L529-L549 |
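A sketch of a specialized interrupt, along the lines the docstring describes (hypothetical process names; assumes `Interrupt` and `advance` are importable from `greensim`):

```python
from greensim import Simulator, Interrupt, advance

class CleanUp(Interrupt):
    """Distinguishes housekeeping interrupts from, say, timeouts."""

def sleeper():
    try:
        advance(1000.0)              # would sleep a long time...
    except CleanUp:
        print("woken early for clean-up")

def housekeeper(target):
    advance(5.0)
    target.interrupt(CleanUp())      # raises CleanUp inside `sleeper`

sim = Simulator()
proc = sim.add(sleeper)              # add() returns the Process instance
sim.add(housekeeper, proc)
sim.run()
```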
ElementAI/greensim | greensim/__init__.py | Queue.join | def join(self, timeout: Optional[float] = None):
"""
Can be invoked only by a process: makes it join the queue. The order token is computed once for the process,
before it is enqueued. Another process or event, or control code of some sort, must invoke method `pop()` of the
queue so that the process can eventually leave the queue and carry on with its execution.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process.
"""
class CancelBalk(Interrupt):
pass
self._counter += 1
if _logger is not None:
self._log(INFO, "join")
heappush(self._waiting, (self._get_order_token(self._counter), Process.current()))
proc_balk = None
if timeout is not None:
def balk(proc):
nonlocal proc_balk
try:
advance(cast(float, timeout))
proc.interrupt(Timeout())
except CancelBalk:
pass
finally:
proc_balk = None
# The balking process is started here.
proc_balk = add(balk, Process.current())
try:
pause()
except Interrupt:
current = Process.current()
for index in reversed([i for i, (_, proc) in enumerate(self._waiting) if proc is current]):
del self._waiting[index]
heapify(self._waiting)
raise
finally:
# Three situations can prompt a process to exit a queue:
#
# 1. The process is pop()ped out of the queue by a peer.
# 2. The process balk()s out after a timeout.
# 3. The process leaves the queue because of a distinct interrupt (besides CancelBalk).
#
# In cases 1 and 3, the balking process has never exited and is still in the advance() call. In both these
# cases, the balking process should itself be interrupted, otherwise it may prompt the balking of a future
# queue traversal. However, if we exit the queue because of case no. 2, the balking process is finished.
# Interrupting it would do no harm (it has been tested by accident), but we mean to be deliberate about when
# this interruption is necessary. So we perform the interrupt of the balking process only in cases 1 and 3;
# in case 2, the balk() function exits, thereby clearing the reference we have here to it. Do remark that
# whenever a timeout is not set, proc_balk remains None all the way, reducing the situation to case 1.
if proc_balk is not None:
proc_balk.interrupt(CancelBalk()) | python | def join(self, timeout: Optional[float] = None):
"""
Can be invoked only by a process: makes it join the queue. The order token is computed once for the process,
before it is enqueued. Another process or event, or control code of some sort, must invoke method `pop()` of the
queue so that the process can eventually leave the queue and carry on with its execution.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process.
"""
class CancelBalk(Interrupt):
pass
self._counter += 1
if _logger is not None:
self._log(INFO, "join")
heappush(self._waiting, (self._get_order_token(self._counter), Process.current()))
proc_balk = None
if timeout is not None:
def balk(proc):
nonlocal proc_balk
try:
advance(cast(float, timeout))
proc.interrupt(Timeout())
except CancelBalk:
pass
finally:
proc_balk = None
# The balking process is started here.
proc_balk = add(balk, Process.current())
try:
pause()
except Interrupt:
current = Process.current()
for index in reversed([i for i, (_, proc) in enumerate(self._waiting) if proc is current]):
del self._waiting[index]
heapify(self._waiting)
raise
finally:
# Three situations can prompt a process to exit a queue:
#
# 1. The process is pop()ped out of the queue by a peer.
# 2. The process balk()s out after a timeout.
# 3. The process leaves the queue because of a distinct interrupt (besides CancelBalk).
#
# In cases 1 and 3, the balking process has never exited and is still in the advance() call. In both these
# cases, the balking process should itself be interrupted, otherwise it may prompt the balking of a future
# queue traversal. However, if we exit the queue because of case no. 2, the balking process is finished.
# Interrupting it would do no harm (it has been tested by accident), but we mean to be deliberate about when
# this interruption is necessary. So we perform the interrupt of the balking process only in cases 1 and 3;
# in case 2, the balk() function exits, thereby clearing the reference we have here to it. Do remark that
# whenever a timeout is not set, proc_balk remains None all the way, reducing the situation to case 1.
if proc_balk is not None:
proc_balk.interrupt(CancelBalk()) | Can be invoked only by a process: makes it join the queue. The order token is computed once for the process,
before it is enqueued. Another process or event, or control code of some sort, must invoke method `pop()` of the
queue so that the process can eventually leave the queue and carry on with its execution.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L705-L761 |
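A balking-queue sketch built on `join`/`pop` (an assumption here: the default `Queue()` constructor yields FIFO ordering; `Timeout` is importable from `greensim`):

```python
from greensim import Simulator, Queue, Timeout, advance

def customer(queue, name, patience):
    try:
        queue.join(timeout=patience)   # parked until pop()ped, or times out
        print(name, "was served")
    except Timeout:
        print(name, "balked after", patience)

def server(queue):
    while True:
        advance(10.0)
        queue.pop()                    # no-op when the queue is empty

sim = Simulator()
q = Queue()
sim.add(customer, q, "patient", 50.0)
sim.add(customer, q, "impatient", 5.0)
sim.add(server, q)
sim.run(100.0)
```

The impatient customer hits its 5.0-unit timeout before the first `pop()` at t = 10.0 and is removed from the waiting heap by the interrupt handler shown above.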
ElementAI/greensim | greensim/__init__.py | Queue.pop | def pop(self):
"""
Removes the top process from the queue, and resumes its execution. For an empty queue, this method is a no-op.
This method may be invoked from anywhere (its use is not confined to processes, as method `join()` is).
"""
if not self.is_empty():
_, process = heappop(self._waiting)
if _logger is not None:
self._log(INFO, "pop", process=process.local.name)
process.resume() | python | def pop(self):
"""
Removes the top process from the queue, and resumes its execution. For an empty queue, this method is a no-op.
This method may be invoked from anywhere (its use is not confined to processes, as method `join()` is).
"""
if not self.is_empty():
_, process = heappop(self._waiting)
if _logger is not None:
self._log(INFO, "pop", process=process.local.name)
process.resume() | Removes the top process from the queue, and resumes its execution. For an empty queue, this method is a no-op.
This method may be invoked from anywhere (its use is not confined to processes, as method `join()` is). | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L763-L772 |
ElementAI/greensim | greensim/__init__.py | Signal.turn_on | def turn_on(self) -> "Signal":
"""
Turns on the signal. If processes are waiting, they are all resumed. This may be invoked from any code.
Remark that while processes are simultaneously resumed in simulated time, they are effectively resumed in the
sequence corresponding to the queue discipline. Therefore, if one of the resumed processes turns the signal back
off, remaining resumed processes join back the queue. If the queue discipline is not monotonic (for instance,
if it bears a random component), then this toggling of the signal may reorder the processes.
"""
if _logger is not None:
self._log(INFO, "turn-on")
self._is_on = True
while not self._queue.is_empty():
self._queue.pop()
return self | python | def turn_on(self) -> "Signal":
"""
Turns on the signal. If processes are waiting, they are all resumed. This may be invoked from any code.
Remark that while processes are simultaneously resumed in simulated time, they are effectively resumed in the
sequence corresponding to the queue discipline. Therefore, if one of the resumed processes turns the signal back
off, remaining resumed processes join back the queue. If the queue discipline is not monotonic (for instance,
if it bears a random component), then this toggling of the signal may reorder the processes.
"""
if _logger is not None:
self._log(INFO, "turn-on")
self._is_on = True
while not self._queue.is_empty():
self._queue.pop()
return self | Turns on the signal. If processes are waiting, they are all resumed. This may be invoked from any code.
Remark that while processes are simultaneously resumed in simulated time, they are effectively resumed in the
sequence corresponding to the queue discipline. Therefore, if one of the resumed processes turns the signal back
off, remaining resumed processes join back the queue. If the queue discipline is not monotonic (for instance,
if it bears a random component), then this toggling of the signal may reorder the processes. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L794-L808 |
ElementAI/greensim | greensim/__init__.py | Signal.turn_off | def turn_off(self) -> "Signal":
"""
Turns off the signal. This may be invoked from any code.
"""
if _logger is not None:
self._log(INFO, "turn-off")
self._is_on = False
return self | python | def turn_off(self) -> "Signal":
"""
Turns off the signal. This may be invoked from any code.
"""
if _logger is not None:
self._log(INFO, "turn-off")
self._is_on = False
return self | Turns off the signal. This may be invoked from any code. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L810-L817 |
ElementAI/greensim | greensim/__init__.py | Signal.wait | def wait(self, timeout: Optional[float] = None) -> None:
"""
Makes the current process wait for the signal. If the signal is off, the process joins the signal's queue.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
stops waiting for the :py:class:`Signal`. In such a situation, a :py:class:`Timeout` exception is raised on
the process.
"""
if _logger is not None:
self._log(INFO, "wait")
while not self.is_on:
self._queue.join(timeout) | python | def wait(self, timeout: Optional[float] = None) -> None:
"""
Makes the current process wait for the signal. If the signal is off, the process joins the signal's queue.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
stops waiting for the :py:class:`Signal`. In such a situation, a :py:class:`Timeout` exception is raised on
the process.
"""
if _logger is not None:
self._log(INFO, "wait")
while not self.is_on:
self._queue.join(timeout) | Makes the current process wait for the signal. If the signal is off, the process joins the signal's queue.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
stops waiting for the :py:class:`Signal`. In such a situation, a :py:class:`Timeout` exception is raised on
the process. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L819-L831 |
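A gate pattern with `Signal` (hypothetical worker/opener processes):

```python
from greensim import Simulator, Signal, advance

def worker(gate, tag):
    gate.wait()        # joins the signal's queue while the gate is off
    print(tag, "passed the gate")

def opener(gate):
    advance(7.0)
    gate.turn_on()     # resumes every waiting process at once

sim = Simulator()
gate = Signal(name="gate").turn_off()
sim.add(worker, gate, "w1")
sim.add(worker, gate, "w2")
sim.add(opener, gate)
sim.run()
```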
ElementAI/greensim | greensim/__init__.py | Resource.take | def take(self, num_instances: int = 1, timeout: Optional[float] = None) -> None:
"""
The current process reserves a certain number of instances. If there are not enough instances available, the
process is made to join a queue. When this method returns, the process holds the instances it has requested to
take.
:param num_instances:
Number of resource instances to take.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process.
"""
if num_instances < 1:
raise ValueError(f"Process must request at least 1 instance; here requested {num_instances}.")
if num_instances > self.num_instances_total:
raise ValueError(
f"Process must request at most {self.num_instances_total} instances; here requested {num_instances}."
)
if _logger is not None:
self._log(INFO, "take", num_instances=num_instances, free=self.num_instances_free)
proc = Process.current()
if self._num_instances_free < num_instances:
proc.local.__num_instances_required = num_instances
try:
self._waiting.join(timeout)
finally:
del proc.local.__num_instances_required
self._num_instances_free -= num_instances
if _logger is not None and proc in self._usage:
self._log(WARNING, "take-again", already=self._usage[proc], more=num_instances)
self._usage.setdefault(proc, 0)
self._usage[proc] += num_instances | python | def take(self, num_instances: int = 1, timeout: Optional[float] = None) -> None:
"""
The current process reserves a certain number of instances. If there are not enough instances available, the
process is made to join a queue. When this method returns, the process holds the instances it has requested to
take.
:param num_instances:
Number of resource instances to take.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process.
"""
if num_instances < 1:
raise ValueError(f"Process must request at least 1 instance; here requested {num_instances}.")
if num_instances > self.num_instances_total:
raise ValueError(
f"Process must request at most {self.num_instances_total} instances; here requested {num_instances}."
)
if _logger is not None:
self._log(INFO, "take", num_instances=num_instances, free=self.num_instances_free)
proc = Process.current()
if self._num_instances_free < num_instances:
proc.local.__num_instances_required = num_instances
try:
self._waiting.join(timeout)
finally:
del proc.local.__num_instances_required
self._num_instances_free -= num_instances
if _logger is not None and proc in self._usage:
self._log(WARNING, "take-again", already=self._usage[proc], more=num_instances)
self._usage.setdefault(proc, 0)
self._usage[proc] += num_instances | The current process reserves a certain number of instances. If there are not enough instances available, the
process is made to join a queue. When this method returns, the process holds the instances it has requested to
take.
:param num_instances:
Number of resource instances to take.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L912-L944 |
ElementAI/greensim | greensim/__init__.py | Resource.release | def release(self, num_instances: int = 1) -> None:
"""
The current process releases instances it has previously taken. It may thus release less than it has taken.
These released instances become free. If the total number of free instances then satisfy the request of the top
process of the waiting queue, it is popped off the queue and resumed.
"""
proc = Process.current()
error_format = "Process %s holds %s instances, but requests to release more (%s)"
if self._usage.get(proc, 0) > 0:
if num_instances > self._usage[proc]:
raise ValueError(
error_format % (proc.local.name, self._usage[proc], num_instances)
)
self._usage[proc] -= num_instances
self._num_instances_free += num_instances
if _logger is not None:
self._log(
INFO,
"release",
num_instances=num_instances,
keeping=self._usage[proc],
free=self.num_instances_free
)
if self._usage[proc] <= 0:
del self._usage[proc]
if not self._waiting.is_empty():
num_instances_next = cast(int, self._waiting.peek().local.__num_instances_required)
if num_instances_next <= self.num_instances_free:
self._waiting.pop()
elif _logger is not None:
self._log(DEBUG, "release-nopop", next_requires=num_instances_next, free=self.num_instances_free)
elif _logger is not None:
self._log(DEBUG, "release-queueempty")
else:
raise RuntimeError(
f"Process {proc.local.name} tries to release {num_instances} instances, but is holding none.)"
) | python | def release(self, num_instances: int = 1) -> None:
"""
The current process releases instances it has previously taken. It may thus release less than it has taken.
These released instances become free. If the total number of free instances then satisfy the request of the top
process of the waiting queue, it is popped off the queue and resumed.
"""
proc = Process.current()
error_format = "Process %s holds %s instances, but requests to release more (%s)"
if self._usage.get(proc, 0) > 0:
if num_instances > self._usage[proc]:
raise ValueError(
error_format % (proc.local.name, self._usage[proc], num_instances)
)
self._usage[proc] -= num_instances
self._num_instances_free += num_instances
if _logger is not None:
self._log(
INFO,
"release",
num_instances=num_instances,
keeping=self._usage[proc],
free=self.num_instances_free
)
if self._usage[proc] <= 0:
del self._usage[proc]
if not self._waiting.is_empty():
num_instances_next = cast(int, self._waiting.peek().local.__num_instances_required)
if num_instances_next <= self.num_instances_free:
self._waiting.pop()
elif _logger is not None:
self._log(DEBUG, "release-nopop", next_requires=num_instances_next, free=self.num_instances_free)
elif _logger is not None:
self._log(DEBUG, "release-queueempty")
else:
raise RuntimeError(
f"Process {proc.local.name} tries to release {num_instances} instances, but is holding none.)"
) | The current process releases instances it has previously taken. It may thus release less than it has taken.
These released instances become free. If the total number of free instances then satisfy the request of the top
process of the waiting queue, it is popped off the queue and resumed. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L946-L982 |
ElementAI/greensim | greensim/__init__.py | Resource.using | def using(self, num_instances: int = 1, timeout: Optional[float] = None):
"""
Context manager around resource reservation: when the code block under the with statement is entered, the
current process holds the instances it requested. When it exits, all these instances are released.
Do not explicitly `release()` instances within the context block, at the risk of breaking instance management.
If one needs to `release()` instances piecemeal, it should instead reserve the instances using `take()`.
:param num_instances:
Number of resource instances to take.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process.
"""
self.take(num_instances, timeout)
try:
    yield self
finally:
    self.release(num_instances)
"""
Context manager around resource reservation: when the code block under the with statement is entered, the
current process holds the instances it requested. When it exits, all these instances are released.
Do not explicitly `release()` instances within the context block, at the risk of breaking instance management.
If one needs to `release()` instances piecemeal, it should instead reserve the instances using `take()`.
:param num_instances:
Number of resource instances to take.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process.
"""
self.take(num_instances, timeout)
try:
    yield self
finally:
    self.release(num_instances)
current process holds the instances it requested. When it exits, all these instances are released.
Do not explicitly `release()` instances within the context block, at the risk of breaking instance management.
If one needs to `release()` instances piecemeal, it should instead reserve the instances using `take()`.
:param num_instances:
Number of resource instances to take.
:param timeout:
If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and
leaves the queue forcibly. In such a situation, a :py:class:`Timeout` exception is raised on the process. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/__init__.py#L985-L1001 |
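A sketch of the context-manager form (assumption: `Resource(3)` creates a pool of three instances; the constructor signature is not shown in this excerpt):

```python
from greensim import Simulator, Resource, advance

def job(machines, tag, minutes):
    with machines.using(2):   # blocks until 2 instances are free
        print(tag, "working")
        advance(minutes)
    # both instances are released here, even if the block raised

sim = Simulator()
machines = Resource(3)        # assumption: instance count as first argument
sim.add(job, machines, "job-1", 15.0)
sim.add(job, machines, "job-2", 5.0)
sim.run()
```

With `job-1` holding two of the three instances, `job-2` waits in the queue until `job-1` releases at t = 15.0.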
ElementAI/greensim | greensim/progress.py | capture_print | def capture_print(file_dest_maybe: Optional[IO] = None):
"""Progress capture that writes updated metrics to an interactive terminal."""
file_dest: IO = file_dest_maybe or sys.stderr
def _print_progress(progress_min: float, rt_remaining: float, _mc: MeasureComparison) -> None:
nonlocal file_dest
percent_progress = progress_min * 100.0
time_remaining, unit = _display_time(rt_remaining)
print(
f"Progress: {percent_progress:.1f}% -- Time remaining: {time_remaining} {unit} ",
end="\r",
file=file_dest
)
return _print_progress | python | def capture_print(file_dest_maybe: Optional[IO] = None):
"""Progress capture that writes updated metrics to an interactive terminal."""
file_dest: IO = file_dest_maybe or sys.stderr
def _print_progress(progress_min: float, rt_remaining: float, _mc: MeasureComparison) -> None:
nonlocal file_dest
percent_progress = progress_min * 100.0
time_remaining, unit = _display_time(rt_remaining)
print(
f"Progress: {percent_progress:.1f}% -- Time remaining: {time_remaining} {unit} ",
end="\r",
file=file_dest
)
return _print_progress | Progress capture that writes updated metrics to an interactive terminal. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/progress.py#L28-L42 |
ElementAI/greensim | greensim/progress.py | track_progress | def track_progress(
measure: MeasureProgress,
target: MetricProgress,
interval_check: float,
capture_maybe: Optional[CaptureProgress] = None
) -> None:
"""
Tracks progress against a certain end condition of the simulation (for instance, a certain duration on the simulated
clock), reporting this progress as the simulation chugs along. Stops the simulation once the target has been
reached. By default, the progress is reported as a printout on standard error, in a manner that works best for
digital terminals.
"""
def measure_to_target() -> MeasureComparison:
return list(zip(measure(), target))
def is_finished(progress: MeasureComparison) -> bool:
return all(p >= t for p, t in progress)
capture = capture_maybe or capture_print()
rt_started = now_real()
while True:
advance(interval_check)
rt_elapsed = now_real() - rt_started
progress = measure_to_target()
ratio_progress_min = min(m / t for m, t in progress)
if ratio_progress_min == 0.0:
rt_total_projected = inf
else:
rt_total_projected = rt_elapsed / ratio_progress_min
capture(ratio_progress_min, rt_total_projected - rt_elapsed, progress)
if is_finished(progress):
stop()
break | python | def track_progress(
measure: MeasureProgress,
target: MetricProgress,
interval_check: float,
capture_maybe: Optional[CaptureProgress] = None
) -> None:
"""
Tracks progress against a certain end condition of the simulation (for instance, a certain duration on the simulated
clock), reporting this progress as the simulation chugs along. Stops the simulation once the target has been
reached. By default, the progress is reported as a printout on standard error, in a manner that works best for
digital terminals.
"""
def measure_to_target() -> MeasureComparison:
return list(zip(measure(), target))
def is_finished(progress: MeasureComparison) -> bool:
return all(p >= t for p, t in progress)
capture = capture_maybe or capture_print()
rt_started = now_real()
while True:
advance(interval_check)
rt_elapsed = now_real() - rt_started
progress = measure_to_target()
ratio_progress_min = min(m / t for m, t in progress)
if ratio_progress_min == 0.0:
rt_total_projected = inf
else:
rt_total_projected = rt_elapsed / ratio_progress_min
capture(ratio_progress_min, rt_total_projected - rt_elapsed, progress)
if is_finished(progress):
stop()
break | Tracks progress against a certain end condition of the simulation (for instance, a certain duration on the simulated
clock), reporting this progress as the simulation chugs along. Stops the simulation once the target has been
reached. By default, the progress is reported as a printout on standard error, in a manner that works best for
digital terminals. | https://github.com/ElementAI/greensim/blob/f160e8b57d69f6ef469f2e991cc07b7721e08a91/greensim/progress.py#L49-L84 |
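A sketch wiring `track_progress` into a simulation (assumption: it can be added as an ordinary process; the measure/target shapes follow the `MeasureProgress`/`MetricProgress` usage above):

```python
from greensim import Simulator, advance
from greensim.progress import track_progress, capture_print

sim = Simulator()

def workload():
    while True:
        advance(1.0)

sim.add(workload)
sim.add(
    track_progress,
    lambda: [sim.now()],   # measured metric: the simulated clock
    [1000.0],              # target: stop once the clock reaches 1000.0
    5.0,                   # re-check every 5.0 simulated time units
    capture_print(),       # terminal-style report, to stderr by default
)
sim.run()                  # track_progress calls stop() at the target
```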
libnano/primer3-py | primer3/wrappers.py | calcTm | def calcTm(seq, mv_conc=50, dv_conc=0, dntp_conc=0.8, dna_conc=50,
max_nn_length=60, tm_method='santalucia',
salt_corrections_method='santalucia'):
''' Return the tm of `seq` as a float.
'''
tm_meth = _tm_methods.get(tm_method)
if tm_meth is None:
raise ValueError('{} is not a valid tm calculation method'.format(
tm_method))
salt_meth = _salt_corrections_methods.get(salt_corrections_method)
if salt_meth is None:
raise ValueError('{} is not a valid salt correction method'.format(
salt_corrections_method))
# For whatever reason mv_conc and dna_conc have to be ints
args = [pjoin(PRIMER3_HOME, 'oligotm'),
'-mv', str(mv_conc),
'-dv', str(dv_conc),
'-n', str(dntp_conc),
'-d', str(dna_conc),
'-tp', str(tm_meth),
'-sc', str(salt_meth),
seq]
tm = subprocess.check_output(args, stderr=DEV_NULL,
env=os.environ)
return float(tm) | python | def calcTm(seq, mv_conc=50, dv_conc=0, dntp_conc=0.8, dna_conc=50,
max_nn_length=60, tm_method='santalucia',
salt_corrections_method='santalucia'):
''' Return the tm of `seq` as a float.
'''
tm_meth = _tm_methods.get(tm_method)
if tm_meth is None:
raise ValueError('{} is not a valid tm calculation method'.format(
tm_method))
salt_meth = _salt_corrections_methods.get(salt_corrections_method)
if salt_meth is None:
raise ValueError('{} is not a valid salt correction method'.format(
salt_corrections_method))
# For whatever reason mv_conc and dna_conc have to be ints
args = [pjoin(PRIMER3_HOME, 'oligotm'),
'-mv', str(mv_conc),
'-dv', str(dv_conc),
'-n', str(dntp_conc),
'-d', str(dna_conc),
'-tp', str(tm_meth),
'-sc', str(salt_meth),
seq]
tm = subprocess.check_output(args, stderr=DEV_NULL,
env=os.environ)
return float(tm) | Return the tm of `seq` as a float. | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L70-L94 |
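A usage sketch for the subprocess wrapper (requires a built Primer3 distribution so that `oligotm` exists under `PRIMER3_HOME`; the primer sequence is just an example):

```python
from primer3 import wrappers

tm = wrappers.calcTm(
    'GTAAAACGACGGCCAGT',   # M13 forward sequencing primer
    mv_conc=50,
    dv_conc=1.5,
    dntp_conc=0.6,
    dna_conc=50,
)
print(f"Tm = {tm:.2f} C")
```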
libnano/primer3-py | primer3/wrappers.py | _parse_ntthal | def _parse_ntthal(ntthal_output):
''' Helper method that uses regex to parse ntthal output. '''
parsed_vals = re.search(_ntthal_re, ntthal_output)
return THERMORESULT(
True, # Structure found
float(parsed_vals.group(1)), # dS
float(parsed_vals.group(2)), # dH
float(parsed_vals.group(3)), # dG
float(parsed_vals.group(4)) # tm
) if parsed_vals else NULLTHERMORESULT | python | def _parse_ntthal(ntthal_output):
''' Helper method that uses regex to parse ntthal output. '''
parsed_vals = re.search(_ntthal_re, ntthal_output)
return THERMORESULT(
True, # Structure found
float(parsed_vals.group(1)), # dS
float(parsed_vals.group(2)), # dH
float(parsed_vals.group(3)), # dG
float(parsed_vals.group(4)) # tm
) if parsed_vals else NULLTHERMORESULT | Helper method that uses regex to parse ntthal output. | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L110-L119 |
libnano/primer3-py | primer3/wrappers.py | calcThermo | def calcThermo(seq1, seq2, calc_type='ANY', mv_conc=50, dv_conc=0,
dntp_conc=0.8, dna_conc=50, temp_c=37, max_loop=30,
temp_only=False):
""" Main subprocess wrapper for calls to the ntthal executable.
Returns a named tuple with tm, ds, dh, and dg values or None if no
structure / complex could be computed.
"""
args = [pjoin(PRIMER3_HOME, 'ntthal'),
'-a', str(calc_type),
'-mv', str(mv_conc),
'-dv', str(dv_conc),
'-n', str(dntp_conc),
'-d', str(dna_conc),
'-t', str(temp_c),
'-maxloop', str(max_loop),
'-path', THERMO_PATH,
'-s1', seq1,
'-s2', seq2]
if temp_only:
args += ['-r']
out = subprocess.check_output(args, stderr=DEV_NULL,
env=os.environ)
return _parse_ntthal(out) | python | def calcThermo(seq1, seq2, calc_type='ANY', mv_conc=50, dv_conc=0,
dntp_conc=0.8, dna_conc=50, temp_c=37, max_loop=30,
temp_only=False):
""" Main subprocess wrapper for calls to the ntthal executable.
Returns a named tuple with tm, ds, dh, and dg values or None if no
structure / complex could be computed.
"""
args = [pjoin(PRIMER3_HOME, 'ntthal'),
'-a', str(calc_type),
'-mv', str(mv_conc),
'-dv', str(dv_conc),
'-n', str(dntp_conc),
'-d', str(dna_conc),
'-t', str(temp_c),
'-maxloop', str(max_loop),
'-path', THERMO_PATH,
'-s1', seq1,
'-s2', seq2]
if temp_only:
args += ['-r']
out = subprocess.check_output(args, stderr=DEV_NULL,
env=os.environ)
return _parse_ntthal(out) | Main subprocess wrapper for calls to the ntthal executable.
Returns a named tuple with tm, ds, dh, and dg values or None if no
structure / complex could be computed. | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L122-L145 |
libnano/primer3-py | primer3/wrappers.py | calcHairpin | def calcHairpin(seq, mv_conc=50, dv_conc=0, dntp_conc=0.8, dna_conc=50,
temp_c=37, max_loop=30, temp_only=False):
''' Return a namedtuple of the dS, dH, dG, and Tm of any hairpin struct
present.
'''
return calcThermo(seq, seq, 'HAIRPIN', mv_conc, dv_conc, dntp_conc,
dna_conc, temp_c, max_loop, temp_only) | python | def calcHairpin(seq, mv_conc=50, dv_conc=0, dntp_conc=0.8, dna_conc=50,
temp_c=37, max_loop=30, temp_only=False):
''' Return a namedtuple of the dS, dH, dG, and Tm of any hairpin struct
present.
'''
return calcThermo(seq, seq, 'HAIRPIN', mv_conc, dv_conc, dntp_conc,
dna_conc, temp_c, max_loop, temp_only) | Return a namedtuple of the dS, dH, dG, and Tm of any hairpin struct
present. | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L148-L154 |
libnano/primer3-py | primer3/wrappers.py | calcHeterodimer | def calcHeterodimer(seq1, seq2, mv_conc=50, dv_conc=0, dntp_conc=0.8,
dna_conc=50, temp_c=37, max_loop=30, temp_only=False):
''' Return a tuple of the dS, dH, dG, and Tm of any predicted heterodimer.
'''
return calcThermo(seq1, seq2, 'ANY', mv_conc, dv_conc, dntp_conc,
dna_conc, temp_c, max_loop, temp_only) | python | def calcHeterodimer(seq1, seq2, mv_conc=50, dv_conc=0, dntp_conc=0.8,
dna_conc=50, temp_c=37, max_loop=30, temp_only=False):
''' Return a tuple of the dS, dH, dG, and Tm of any predicted heterodimer.
'''
return calcThermo(seq1, seq2, 'ANY', mv_conc, dv_conc, dntp_conc,
dna_conc, temp_c, max_loop, temp_only) | Return a tuple of the dS, dH, dG, and Tm of any predicted heterodimer. | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L157-L162 |
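A sketch of the dimer/hairpin wrappers; since the `THERMORESULT` field names are not shown in this excerpt, the result is unpacked positionally in the order `_parse_ntthal` builds it (structure found, dS, dH, dG, Tm):

```python
from primer3 import wrappers

fwd = 'CCGGTACCATGGAATTCGGA'
rev = 'TCCGAATTCCATGGTACCGG'   # reverse complement of fwd

found, ds, dh, dg, tm = wrappers.calcHeterodimer(fwd, rev)
if found:
    print(f"dimer: dG = {dg:.1f}, Tm = {tm:.1f}")

found, ds, dh, dg, tm = wrappers.calcHairpin(fwd)
print("hairpin found:", found)
```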
libnano/primer3-py | primer3/wrappers.py | assessOligo | def assessOligo(seq):
'''
Return the thermodynamic characteristics of hairpin/homodimer structures.
Returns a tuple of namedtuples (hairpin data, homodimer data) in which each
individual tuple is structured (dS, dH, dG, Tm).
'''
hairpin_out = calcHairpin(seq)
homodimer_out = calcHomodimer(seq)
return (hairpin_out, homodimer_out) | python | def assessOligo(seq):
'''
Return the thermodynamic characteristics of hairpin/homodimer structures.
Returns a tuple of namedtuples (hairpin data, homodimer data) in which each
individual tuple is structured (dS, dH, dG, Tm).
'''
hairpin_out = calcHairpin(seq)
homodimer_out = calcHomodimer(seq)
return (hairpin_out, homodimer_out) | Return the thermodynamic characteristics of hairpin/homodimer structures.
Returns a tuple of namedtuples (hairpin data, homodimer data) in which each
individual tuple is structured (dS, dH, dG, Tm). | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L181-L191 |
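Because assessOligo returns a (hairpin, homodimer) pair, callers can unpack it directly; a minimal sketch with a placeholder oligo:

from primer3 import wrappers

hairpin, homodimer = wrappers.assessOligo('ACGTACGTACGTACGTACGT')
for label, result in (('hairpin', hairpin), ('homodimer', homodimer)):
    # Each entry is a (dS, dH, dG, Tm) namedtuple per the docstring,
    # or None when no structure could be computed.
    print(label, result)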
libnano/primer3-py | primer3/wrappers.py | designPrimers | def designPrimers(p3_args, input_log=None, output_log=None, err_log=None):
''' Return the raw primer3_core output for the provided primer3 args.
Returns an ordered dict of the boulderIO-format primer3 output file
'''
sp = subprocess.Popen([pjoin(PRIMER3_HOME, 'primer3_core')],
stdout=subprocess.PIPE, stdin=subprocess.PIPE,
stderr=subprocess.STDOUT)
p3_args.setdefault('PRIMER_THERMODYNAMIC_PARAMETERS_PATH',
pjoin(PRIMER3_HOME, 'primer3_config/'))
in_str = _formatBoulderIO(p3_args)
if input_log:
input_log.write(in_str)
input_log.flush()
out_str, err_str = sp.communicate(input=in_str)
if output_log:
output_log.write(out_str)
output_log.flush()
if err_log and err_str is not None:
err_log.write(err_str)
err_log.flush()
return _parseBoulderIO(out_str) | python | def designPrimers(p3_args, input_log=None, output_log=None, err_log=None):
''' Return the raw primer3_core output for the provided primer3 args.
Returns an ordered dict of the boulderIO-format primer3 output file
'''
sp = subprocess.Popen([pjoin(PRIMER3_HOME, 'primer3_core')],
stdout=subprocess.PIPE, stdin=subprocess.PIPE,
stderr=subprocess.STDOUT)
p3_args.setdefault('PRIMER_THERMODYNAMIC_PARAMETERS_PATH',
pjoin(PRIMER3_HOME, 'primer3_config/'))
in_str = _formatBoulderIO(p3_args)
if input_log:
input_log.write(in_str)
input_log.flush()
out_str, err_str = sp.communicate(input=in_str)
if output_log:
output_log.write(out_str)
output_log.flush()
if err_log and err_str is not None:
err_log.write(err_str)
err_log.flush()
return _parseBoulderIO(out_str) | Return the raw primer3_core output for the provided primer3 args.
Returns an ordered dict of the boulderIO-format primer3 output file | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/wrappers.py#L263-L284 |
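A sketch of driving primer3_core through this wrapper; the Boulder-IO tag names are standard primer3 tags, but the template sequence is a low-complexity placeholder and real designs should use genuine targets.

from primer3 import wrappers

p3_args = {
    'SEQUENCE_ID': 'demo',
    'SEQUENCE_TEMPLATE': 'ATCG' * 60,  # placeholder 240 bp template
    'PRIMER_NUM_RETURN': 1,
}
results = wrappers.designPrimers(p3_args)
print(results.get('PRIMER_LEFT_0_SEQUENCE'))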
libnano/primer3-py | setup.py | makeExecutable | def makeExecutable(fp):
''' Adds the executable bit to the file at filepath `fp`
'''
mode = ((os.stat(fp).st_mode) | 0o555) & 0o7777
setup_log.info("Adding executable bit to %s (mode is now %o)", fp, mode)
os.chmod(fp, mode) | python | def makeExecutable(fp):
''' Adds the executable bit to the file at filepath `fp`
'''
mode = ((os.stat(fp).st_mode) | 0o555) & 0o7777
setup_log.info("Adding executable bit to %s (mode is now %o)", fp, mode)
os.chmod(fp, mode) | Adds the executable bit to the file at filepath `fp` | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/setup.py#L111-L116 |
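A worked example of the mode arithmetic above: OR-ing with 0o555 adds read/execute for user, group, and other, while masking with 0o7777 strips the file-type bits carried in st_mode.

# st_mode for a plain file with 644 permissions is 0o100644.
mode = (0o100644 | 0o555) & 0o7777
assert oct(mode) == '0o755'  # rw-r--r-- becomes rwxr-xr-x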
libnano/primer3-py | primer3/bindings.py | calcHairpin | def calcHairpin(seq, mv_conc=50.0, dv_conc=0.0, dntp_conc=0.8, dna_conc=50.0,
temp_c=37, max_loop=30):
''' Calculate the hairpin formation thermodynamics of a DNA sequence.
**Note that the maximum length of `seq` is 60 bp.** This is a cap suggested
by the Primer3 team as the longest reasonable sequence length for which
a two-state NN model produces reliable results (see primer3/src/libnano/thal.h:50).
Args:
seq (str): DNA sequence to analyze for hairpin formation
mv_conc (float/int, optional): Monovalent cation conc. (mM)
dv_conc (float/int, optional): Divalent cation conc. (mM)
dntp_conc (float/int, optional): dNTP conc. (mM)
dna_conc (float/int, optional): DNA conc. (nM)
temp_c (int, optional): Simulation temperature for dG (Celsius)
max_loop(int, optional): Maximum size of loops in the structure
Returns:
A `ThermoResult` object with thermodynamic characteristics of the
hairpin formation.
Raises:
``RuntimeError``
'''
_setThermoArgs(**locals())
return _THERMO_ANALYSIS.calcHairpin(seq).checkExc() | python | def calcHairpin(seq, mv_conc=50.0, dv_conc=0.0, dntp_conc=0.8, dna_conc=50.0,
temp_c=37, max_loop=30):
''' Calculate the hairpin formation thermodynamics of a DNA sequence.
**Note that the maximum length of `seq` is 60 bp.** This is a cap suggested
by the Primer3 team as the longest reasonable sequence length for which
a two-state NN model produces reliable results (see primer3/src/libnano/thal.h:50).
Args:
seq (str): DNA sequence to analyze for hairpin formation
mv_conc (float/int, optional): Monovalent cation conc. (mM)
dv_conc (float/int, optional): Divalent cation conc. (mM)
dntp_conc (float/int, optional): dNTP conc. (mM)
dna_conc (float/int, optional): DNA conc. (nM)
temp_c (int, optional): Simulation temperature for dG (Celsius)
max_loop(int, optional): Maximum size of loops in the structure
Returns:
A `ThermoResult` object with thermodynamic characteristics of the
hairpin formation.
Raises:
``RuntimeError``
'''
_setThermoArgs(**locals())
return _THERMO_ANALYSIS.calcHairpin(seq).checkExc() | Calculate the hairpin formation thermodynamics of a DNA sequence.
**Note that the maximum length of `seq` is 60 bp.** This is a cap suggested
by the Primer3 team as the longest reasonable sequence length for which
a two-state NN model produces reliable results (see primer3/src/libnano/thal.h:50).
Args:
seq (str): DNA sequence to analyze for hairpin formation
mv_conc (float/int, optional): Monovalent cation conc. (mM)
dv_conc (float/int, optional): Divalent cation conc. (mM)
dntp_conc (float/int, optional): dNTP conc. (mM)
dna_conc (float/int, optional): DNA conc. (nM)
temp_c (int, optional): Simulation temperature for dG (Celsius)
max_loop(int, optional): Maximum size of loops in the structure
Returns:
A `ThermoResult` object with thermodynamic characteristics of the
hairpin formation.
Raises:
``RuntimeError`` | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/bindings.py#L70-L97 |
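A usage sketch for the C-API binding; structure_found, tm, and dg are the documented ThermoResult fields, and the sequence is an illustrative stem-loop candidate.

from primer3 import bindings

result = bindings.calcHairpin('CCCCCATCCGATCAGGGGG', mv_conc=50, dv_conc=1.5)
if result.structure_found:
    print('hairpin Tm: {:.1f} C'.format(result.tm))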
libnano/primer3-py | primer3/bindings.py | calcEndStability | def calcEndStability(seq1, seq2, mv_conc=50, dv_conc=0, dntp_conc=0.8,
dna_conc=50, temp_c=37, max_loop=30):
''' Calculate the 3' end stability of DNA sequence `seq1` against DNA
sequence `seq2`.
**Note that at least one of the two sequences must be <60 bp in length.**
This is a cap imposed by Primer3 as the longest reasonable sequence length
for which a two-state NN model produces reliable results (see
primer3/src/libnano/thal.h:50).
Args:
seq1 (str) : DNA sequence to analyze for 3' end
hybridization against the target
sequence
seq2 (str) : Target DNA sequence to analyze for
seq1 3' end hybridization
mv_conc (float/int, optional) : Monovalent cation conc. (mM)
dv_conc (float/int, optional) : Divalent cation conc. (mM)
dntp_conc (float/int, optional) : dNTP conc. (mM)
dna_conc (float/int, optional) : DNA conc. (nM)
temp_c (int, optional) : Simulation temperature for dG (C)
max_loop(int, optional) : Maximum size of loops in the
structure
Returns:
A `ThermoResult` object with thermodynamic characteristics of the
3' hybridization interaction.
Raises:
``RuntimeError``
'''
_setThermoArgs(**locals())
return _THERMO_ANALYSIS.calcEndStability(seq1, seq2).checkExc() | python | def calcEndStability(seq1, seq2, mv_conc=50, dv_conc=0, dntp_conc=0.8,
dna_conc=50, temp_c=37, max_loop=30):
''' Calculate the 3' end stability of DNA sequence `seq1` against DNA
sequence `seq2`.
**Note that at least one of the two sequences must be <60 bp in length.**
This is a cap imposed by Primer3 as the longest reasonable sequence length
for which a two-state NN model produces reliable results (see
primer3/src/libnano/thal.h:50).
Args:
seq1 (str) : DNA sequence to analyze for 3' end
hybridization against the target
sequence
seq2 (str) : Target DNA sequence to analyze for
seq1 3' end hybridization
mv_conc (float/int, optional) : Monovalent cation conc. (mM)
dv_conc (float/int, optional) : Divalent cation conc. (mM)
dntp_conc (float/int, optional) : dNTP conc. (mM)
dna_conc (float/int, optional) : DNA conc. (nM)
temp_c (int, optional) : Simulation temperature for dG (C)
max_loop(int, optional) : Maximum size of loops in the
structure
Returns:
A `ThermoResult` object with thermodynamic characteristics of the
3' hybridization interaction.
Raises:
``RuntimeError``
'''
_setThermoArgs(**locals())
return _THERMO_ANALYSIS.calcEndStability(seq1, seq2).checkExc() | Calculate the 3' end stability of DNA sequence `seq1` against DNA
sequence `seq2`.
**Note that at least one of the two sequences must be <60 bp in length.**
This is a cap imposed by Primer3 as the longest reasonable sequence length
for which a two-state NN model produces reliable results (see
primer3/src/libnano/thal.h:50).
Args:
seq1 (str) : DNA sequence to analyze for 3' end
hybridization against the target
sequence
seq2 (str) : Target DNA sequence to analyze for
seq1 3' end hybridization
mv_conc (float/int, optional) : Monovalent cation conc. (mM)
dv_conc (float/int, optional) : Divalent cation conc. (mM)
dntp_conc (float/int, optional) : dNTP conc. (mM)
dna_conc (float/int, optional) : DNA conc. (nM)
temp_c (int, optional) : Simulation temperature for dG (C)
max_loop(int, optional) : Maximum size of loops in the
structure
Returns:
A `ThermoResult` object with thermodynamic characteristics of the
3' hybridization interaction.
Raises:
``RuntimeError`` | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/bindings.py#L167-L201 |
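A sketch probing the 3' end stability of a short primer against a longer target; both sequences are placeholders, and per the note above at least one must be under 60 bp.

from primer3 import bindings

primer = 'GTAAAACGACGGCCAGT'
target = 'TTGTAAAACGACGGCCAGTGAATTGTAATACGACTCACTATAGGG'
res = bindings.calcEndStability(primer, target, mv_conc=50, dv_conc=1.5)
print(res.dg, res.tm)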
libnano/primer3-py | primer3/bindings.py | calcTm | def calcTm(seq, mv_conc=50, dv_conc=0, dntp_conc=0.8, dna_conc=50,
max_nn_length=60, tm_method='santalucia',
salt_corrections_method='santalucia'):
''' Calculate the melting temperature (Tm) of a DNA sequence.
Note that NN thermodynamics will be used to calculate the Tm of sequences
up to 60 bp in length, after which point the following formula will be
used::
Tm = 81.5 + 16.6(log10([mv_conc])) + 0.41(%GC) - 600/length
Args:
seq (str) : DNA sequence
mv_conc (float/int, optional) : Monovalent cation conc. (mM)
dv_conc (float/int, optional) : Divalent cation conc. (mM)
dntp_conc (float/int, optional) : dNTP conc. (mM)
dna_conc (float/int, optional) : DNA conc. (nM)
max_nn_length (int, optional) : Maximum length for
nearest-neighbor calcs
tm_method (str, optional) : Tm calculation method
(breslauer or santalucia)
salt_corrections_method (str, optional) : Salt correction method
(schildkraut, owczarzy,
santalucia)
Returns:
The melting temperature in degrees Celsius (float).
'''
_setThermoArgs(**locals())
return _THERMO_ANALYSIS.calcTm(seq) | python | def calcTm(seq, mv_conc=50, dv_conc=0, dntp_conc=0.8, dna_conc=50,
max_nn_length=60, tm_method='santalucia',
salt_corrections_method='santalucia'):
''' Calculate the melting temperature (Tm) of a DNA sequence.
Note that NN thermodynamics will be used to calculate the Tm of sequences
up to 60 bp in length, after which point the following formula will be
used::
Tm = 81.5 + 16.6(log10([mv_conc])) + 0.41(%GC) - 600/length
Args:
seq (str) : DNA sequence
mv_conc (float/int, optional) : Monovalent cation conc. (mM)
dv_conc (float/int, optional) : Divalent cation conc. (mM)
dntp_conc (float/int, optional) : dNTP conc. (mM)
dna_conc (float/int, optional) : DNA conc. (nM)
max_nn_length (int, optional) : Maximum length for
nearest-neighbor calcs
tm_method (str, optional) : Tm calculation method
(breslauer or santalucia)
salt_corrections_method (str, optional) : Salt correction method
(schildkraut, owczarzy,
santalucia)
Returns:
The melting temperature in degrees Celsius (float).
'''
_setThermoArgs(**locals())
return _THERMO_ANALYSIS.calcTm(seq) | Calculate the melting temperature (Tm) of a DNA sequence.
Note that NN thermodynamics will be used to calculate the Tm of sequences
up to 60 bp in length, after which point the following formula will be
used::
Tm = 81.5 + 16.6(log10([mv_conc])) + 0.41(%GC) - 600/length
Args:
seq (str) : DNA sequence
mv_conc (float/int, optional) : Monovalent cation conc. (mM)
dv_conc (float/int, optional) : Divalent cation conc. (mM)
dntp_conc (float/int, optional) : dNTP conc. (mM)
dna_conc (float/int, optional) : DNA conc. (nM)
max_nn_length (int, optional) : Maximum length for
nearest-neighbor calcs
tm_method (str, optional) : Tm calculation method
(breslauer or santalucia)
salt_corrections_method (str, optional) : Salt correction method
(schildkraut, owczarzy,
santalucia)
Returns:
The melting temperature in degrees Celsius (float). | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/bindings.py#L204-L234 |
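A re-implementation sketch of the long-sequence fallback formula quoted above. It assumes the monovalent term enters in molar units, as in the classic salt-adjusted formula, so mv_conc in mM is divided by 1000; for sequences within max_nn_length the library's nearest-neighbor result should be preferred.

import math

def long_seq_tm(seq, mv_conc=50):
    gc_pct = 100.0 * sum(seq.count(b) for b in 'GC') / len(seq)
    return (81.5 + 16.6 * math.log10(mv_conc / 1000.0)
            + 0.41 * gc_pct - 600.0 / len(seq))

print(round(long_seq_tm('ATCG' * 30), 1))  # ~75.4 for a 120-mer at 50% GC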
libnano/primer3-py | primer3/bindings.py | designPrimers | def designPrimers(seq_args, global_args=None, misprime_lib=None,
mishyb_lib=None, debug=False):
''' Run the Primer3 design process.
If the global args have been previously set (either by a previous
`designPrimers` call or by a `setGlobals` call), `designPrimers` may be
called with `seq_args` alone (as a means of optimization).
Args:
seq_args (dict) : Primer3 sequence/design args as per
Primer3 docs
global_args (dict, optional) : Primer3 global args as per Primer3 docs
misprime_lib (dict, optional) : `Sequence name: sequence` dictionary
for mispriming checks.
mishyb_lib (dict, optional) : `Sequence name: sequence` dictionary
for mishybridization checks.
Returns:
A dictionary of Primer3 results (should be identical to the expected
BoulderIO output from primer3_main)
'''
if global_args:
primerdesign.setGlobals(global_args, misprime_lib, mishyb_lib)
primerdesign.setSeqArgs(seq_args)
return primerdesign.runDesign(debug) | python | def designPrimers(seq_args, global_args=None, misprime_lib=None,
mishyb_lib=None, debug=False):
''' Run the Primer3 design process.
If the global args have been previously set (either by a previous
`designPrimers` call or by a `setGlobals` call), `designPrimers` may be
called with `seq_args` alone (as a means of optimization).
Args:
seq_args (dict) : Primer3 sequence/design args as per
Primer3 docs
global_args (dict, optional) : Primer3 global args as per Primer3 docs
misprime_lib (dict, optional) : `Sequence name: sequence` dictionary
for mispriming checks.
mishyb_lib (dict, optional) : `Sequence name: sequence` dictionary
for mishybridization checks.
Returns:
A dictionary of Primer3 results (should be identical to the expected
BoulderIO output from primer3_main)
'''
if global_args:
primerdesign.setGlobals(global_args, misprime_lib, mishyb_lib)
primerdesign.setSeqArgs(seq_args)
return primerdesign.runDesign(debug) | Run the Primer3 design process.
If the global args have been previously set (either by a previous
`designPrimers` call or by a `setGlobals` call), `designPrimers` may be
called with `seq_args` alone (as a means of optimization).
Args:
seq_args (dict) : Primer3 sequence/design args as per
Primer3 docs
global_args (dict, optional) : Primer3 global args as per Primer3 docs
misprime_lib (dict, optional) : `Sequence name: sequence` dictionary
for mispriming checks.
mishyb_lib (dict, optional) : `Sequence name: sequence` dictionary
for mishybridization checks.
Returns:
A dictionary of Primer3 results (should be identical to the expected
BoulderIO output from primer3_main) | https://github.com/libnano/primer3-py/blob/0901c0ef3ac17afd69329d23db71136c00bcb635/primer3/bindings.py#L246-L272 |
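A sketch of the two-dict interface; per the docstring, a follow-up call can omit global_args to reuse the previously set globals. Tag values below are illustrative constraints, not recommendations, and the template is a placeholder.

from primer3 import bindings

global_args = {
    'PRIMER_OPT_SIZE': 20,
    'PRIMER_MIN_SIZE': 18,
    'PRIMER_MAX_SIZE': 25,
    'PRIMER_PRODUCT_SIZE_RANGE': [[75, 150]],
}
seq_args = {
    'SEQUENCE_ID': 'example',
    'SEQUENCE_TEMPLATE': 'GCTTGCATGCCTGCAGGTCGACTCTAGAGGATCC' * 6,
}
out = bindings.designPrimers(seq_args, global_args)
print(out.get('PRIMER_PAIR_NUM_RETURNED'))
# A later design against a new sequence may pass seq_args alone.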
mitodl/PyLmod | pylmod/gradebook.py | GradeBook.unravel_sections | def unravel_sections(section_data):
"""Unravels section type dictionary into flat list of sections with
section type set as an attribute.
Args:
section_data(dict): Data return from py:method::get_sections
Returns:
list: Flat list of sections with ``sectionType`` set to
type (i.e. recitation, lecture, etc)
"""
sections = []
for section_type, subsection_list in section_data.items():
for section in subsection_list:
section['sectionType'] = section_type
sections.append(section)
return sections | python | def unravel_sections(section_data):
"""Unravels section type dictionary into flat list of sections with
section type set as an attribute.
Args:
section_data(dict): Data return from py:method::get_sections
Returns:
list: Flat list of sections with ``sectionType`` set to
type (i.e. recitation, lecture, etc)
"""
sections = []
for section_type, subsection_list in section_data.items():
for section in subsection_list:
section['sectionType'] = section_type
sections.append(section)
return sections | Unravels a section-type dictionary into a flat list of sections with
the section type set as an attribute.
Args:
section_data(dict): Data returned from :py:meth:`get_sections`
Returns:
list: Flat list of sections with ``sectionType`` set to
type (e.g. recitation, lecture, etc.) | https://github.com/mitodl/PyLmod/blob/b798b86c33d1eb615e7cd4f3457b5c15da1d86e0/pylmod/gradebook.py#L64-L80 |
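A sketch of the flattening this performs, using a hand-built input in the shape the docstring describes (section names invented; the method is assumed callable as a staticmethod, as its signature suggests).

from pylmod import GradeBook

section_data = {
    'recitation': [{'name': 'R01'}, {'name': 'R02'}],
    'lecture': [{'name': 'L01'}],
}
flat = GradeBook.unravel_sections(section_data)
# Each section dict gains a 'sectionType' key, e.g.
# {'name': 'R01', 'sectionType': 'recitation'}
print(len(flat))  # 3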
mitodl/PyLmod | pylmod/gradebook.py | GradeBook.unravel_staff | def unravel_staff(staff_data):
"""Unravels staff role dictionary into flat list of staff
members with ``role`` set as an attribute.
Args:
staff_data(dict): Data return from py:method::get_staff
Returns:
list: Flat list of staff members with ``role`` set to
role type (i.e. course_admin, instructor, TA, etc)
"""
staff_list = []
for role, staff_members in staff_data['data'].items():
for member in staff_members:
member['role'] = role
staff_list.append(member)
return staff_list | python | def unravel_staff(staff_data):
"""Unravels staff role dictionary into flat list of staff
members with ``role`` set as an attribute.
Args:
staff_data(dict): Data return from py:method::get_staff
Returns:
list: Flat list of staff members with ``role`` set to
role type (i.e. course_admin, instructor, TA, etc)
"""
staff_list = []
for role, staff_members in staff_data['data'].items():
for member in staff_members:
member['role'] = role
staff_list.append(member)
return staff_list | Unravels a staff-role dictionary into a flat list of staff
members with ``role`` set as an attribute.
Args:
staff_data(dict): Data returned from :py:meth:`get_staff`
Returns:
list: Flat list of staff members with ``role`` set to
role type (e.g. course_admin, instructor, TA, etc.) | https://github.com/mitodl/PyLmod/blob/b798b86c33d1eb615e7cd4f3457b5c15da1d86e0/pylmod/gradebook.py#L83-L99 |
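The same idea for staff data; note that, unlike unravel_sections, this reads the nested 'data' key. The input below is hand-built and hypothetical.

from pylmod import GradeBook

staff_data = {'data': {'TA': [{'name': 'Ben'}],
                       'instructor': [{'name': 'Ana'}]}}
flat = GradeBook.unravel_staff(staff_data)
# Each member gains a 'role' key, e.g. {'name': 'Ben', 'role': 'TA'}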
mitodl/PyLmod | pylmod/gradebook.py | GradeBook.get_gradebook_id | def get_gradebook_id(self, gbuuid):
"""Return gradebookid for a given gradebook uuid.
Args:
gbuuid (str): gradebook uuid, i.e. ``STELLAR:/project/gbngtest``
Raises:
PyLmodUnexpectedData: No gradebook id returned
requests.RequestException: Exception connection error
ValueError: Unable to decode response content
Returns:
str: value of gradebook id
"""
gradebook = self.get('gradebook', params={'uuid': gbuuid})
if 'data' not in gradebook:
failure_message = ('Error in get_gradebook_id '
'for {0} - no data'.format(
gradebook
))
log.error(failure_message)
raise PyLmodUnexpectedData(failure_message)
return gradebook['data']['gradebookId'] | python | def get_gradebook_id(self, gbuuid):
"""Return gradebookid for a given gradebook uuid.
Args:
gbuuid (str): gradebook uuid, i.e. ``STELLAR:/project/gbngtest``
Raises:
PyLmodUnexpectedData: No gradebook id returned
requests.RequestException: Exception connection error
ValueError: Unable to decode response content
Returns:
str: value of gradebook id
"""
gradebook = self.get('gradebook', params={'uuid': gbuuid})
if 'data' not in gradebook:
failure_message = ('Error in get_gradebook_id '
'for {0} - no data'.format(
gradebook
))
log.error(failure_message)
raise PyLmodUnexpectedData(failure_message)
return gradebook['data']['gradebookId'] | Return gradebookid for a given gradebook uuid.
Args:
gbuuid (str): gradebook uuid, i.e. ``STELLAR:/project/gbngtest``
Raises:
PyLmodUnexpectedData: No gradebook id returned
requests.RequestException: Exception connection error
ValueError: Unable to decode response content
Returns:
str: value of gradebook id | https://github.com/mitodl/PyLmod/blob/b798b86c33d1eb615e7cd4f3457b5c15da1d86e0/pylmod/gradebook.py#L101-L123 |
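A usage sketch with error handling; the certificate path and service URL are placeholders, and the exception's import path (pylmod.exceptions) is assumed.

from pylmod import GradeBook
from pylmod.exceptions import PyLmodUnexpectedData

gb = GradeBook('path/to/cert.pem', 'https://learning-modules.mit.edu:8443/')
try:
    gradebook_id = gb.get_gradebook_id('STELLAR:/project/gbngtest')
except PyLmodUnexpectedData:
    gradebook_id = None  # lookup returned no data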
mitodl/PyLmod | pylmod/gradebook.py | GradeBook.get_options | def get_options(self, gradebook_id):
"""Get options for gradebook.
Get options dictionary for a gradebook. Options include gradebook
attributes.
Args:
gradebook_id (str): unique identifier for gradebook, i.e. ``2314``
Returns:
An example return value is:
.. code-block:: python
{
u'data':
{
u'accessLevel': u'class',
u'archived': False,
u'calc_on_approved_only': False,
u'configured': None,
u'courseName': u'',
u'courseNumber': u'mitxdemosite',
u'deriveOverallGrades': False,
u'gradebookEwsEnabled': False,
u'gradebookId': 1293808,
u'gradebookName': u'Gradebook for mitxdemosite',
u'gradebookReadOnly': False,
u'gradebookVisibleToAdvisors': False,
u'graders_change_approved': False,
u'hideExcuseButtonInUI': False,
u'homeworkBetaEnabled': False,
u'membershipQualifier': u'/project/mitxdemosite',
u'membershipSource': u'stellar',
u'student_sees_actual_grades': True,
u'student_sees_category_info': True,
u'student_sees_comments': True,
u'student_sees_cumulative_score': True,
u'student_sees_histograms': True,
u'student_sees_submissions': False,
u'ta_approves': False,
u'ta_change_approved': False,
u'ta_configures': False,
u'ta_edits': False,
u'use_grade_weighting': False,
u'usingAttendance': False,
u'versionCompatible': 4,
u'versionCompatibleString': u'General Availability'
},
}
"""
end_point = 'gradebook/options/{gradebookId}'.format(
gradebookId=gradebook_id or self.gradebook_id)
options = self.get(end_point)
return options['data'] | python | def get_options(self, gradebook_id):
"""Get options for gradebook.
Get options dictionary for a gradebook. Options include gradebook
attributes.
Args:
gradebook_id (str): unique identifier for gradebook, i.e. ``2314``
Returns:
An example return value is:
.. code-block:: python
{
u'data':
{
u'accessLevel': u'class',
u'archived': False,
u'calc_on_approved_only': False,
u'configured': None,
u'courseName': u'',
u'courseNumber': u'mitxdemosite',
u'deriveOverallGrades': False,
u'gradebookEwsEnabled': False,
u'gradebookId': 1293808,
u'gradebookName': u'Gradebook for mitxdemosite',
u'gradebookReadOnly': False,
u'gradebookVisibleToAdvisors': False,
u'graders_change_approved': False,
u'hideExcuseButtonInUI': False,
u'homeworkBetaEnabled': False,
u'membershipQualifier': u'/project/mitxdemosite',
u'membershipSource': u'stellar',
u'student_sees_actual_grades': True,
u'student_sees_category_info': True,
u'student_sees_comments': True,
u'student_sees_cumulative_score': True,
u'student_sees_histograms': True,
u'student_sees_submissions': False,
u'ta_approves': False,
u'ta_change_approved': False,
u'ta_configures': False,
u'ta_edits': False,
u'use_grade_weighting': False,
u'usingAttendance': False,
u'versionCompatible': 4,
u'versionCompatibleString': u'General Availability'
},
}
"""
end_point = 'gradebook/options/{gradebookId}'.format(
gradebookId=gradebook_id or self.gradebook_id)
options = self.get(end_point)
return options['data'] | Get options for gradebook.
Get options dictionary for a gradebook. Options include gradebook
attributes.
Args:
gradebook_id (str): unique identifier for gradebook, i.e. ``2314``
Returns:
An example return value is:
.. code-block:: python
{
u'data':
{
u'accessLevel': u'class',
u'archived': False,
u'calc_on_approved_only': False,
u'configured': None,
u'courseName': u'',
u'courseNumber': u'mitxdemosite',
u'deriveOverallGrades': False,
u'gradebookEwsEnabled': False,
u'gradebookId': 1293808,
u'gradebookName': u'Gradebook for mitxdemosite',
u'gradebookReadOnly': False,
u'gradebookVisibleToAdvisors': False,
u'graders_change_approved': False,
u'hideExcuseButtonInUI': False,
u'homeworkBetaEnabled': False,
u'membershipQualifier': u'/project/mitxdemosite',
u'membershipSource': u'stellar',
u'student_sees_actual_grades': True,
u'student_sees_category_info': True,
u'student_sees_comments': True,
u'student_sees_cumulative_score': True,
u'student_sees_histograms': True,
u'student_sees_submissions': False,
u'ta_approves': False,
u'ta_change_approved': False,
u'ta_configures': False,
u'ta_edits': False,
u'use_grade_weighting': False,
u'usingAttendance': False,
u'versionCompatible': 4,
u'versionCompatibleString': u'General Availability'
},
} | https://github.com/mitodl/PyLmod/blob/b798b86c33d1eb615e7cd4f3457b5c15da1d86e0/pylmod/gradebook.py#L125-L181 |
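A sketch reading a couple of flags out of the options payload illustrated above; constructor arguments are placeholders, and passing gbuuid so that self.gradebook_id is populated is an assumption about the constructor.

from pylmod import GradeBook

gb = GradeBook('path/to/cert.pem', 'https://learning-modules.mit.edu:8443/',
               gbuuid='STELLAR:/project/mitxdemosite')
opts = gb.get_options(gb.gradebook_id)
print(opts['gradebookName'], opts['usingAttendance'])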